diff --git a/.claude/commands/clerk-migration.md b/.claude/commands/clerk-migration.md
new file mode 100644
index 0000000..ec2469c
--- /dev/null
+++ b/.claude/commands/clerk-migration.md
@@ -0,0 +1,60 @@
+---
+description: Unified entry point for the Clerk user migration tool. Routes to the right skill based on your task (export, migrate, create transformer, or Clerk-to-Clerk migration).
+---
+
+Help the user with their Clerk user migration task.
+
+## Workflow
+
+### Step 1: Verify Environment
+
+1. Check if dependencies are installed. If not, run `bun install`.
+2. Check if `.env` exists with `CLERK_SECRET_KEY`. If missing, ask for the key (https://dashboard.clerk.com/~/api-keys) and create/update `.env`.
+
+### Step 2: Identify task type
+
+Analyze $ARGUMENTS to determine the task:
+
+| User wants to... | Skill to use |
+| ------------------------------------------------ | ---------------------- |
+| Export users from a source platform | `/export` |
+| Import/migrate users into Clerk from a file | `/migrate` |
+| Create a custom transformer for unsupported data | `/transformer` |
+| Move users between Clerk instances (dev → prod) | `/clerk-migration` |
+| Not sure / general help | Show the options below |
+
+If the task is unclear, present the options:
+
+> I can help with these migration tasks:
+>
+> 1. **Export users** — Export from Auth0, AuthJS, Better Auth, Clerk, Firebase, or Supabase (`/export`)
+> 2. **Migrate users** — Import users into Clerk from a JSON/CSV file (`/migrate`)
+> 3. **Create a transformer** — Generate a custom transformer for unsupported data formats (`/transformer`)
+> 4. **Clerk-to-Clerk migration** — Move users between Clerk instances, e.g. dev → prod (`/clerk-migration`)
+>
+> What would you like to do?
+
+### Step 3: Load the appropriate skill
+
+Based on the identified task, load the skill:
+
+- **Export**: `skill({ name: 'export' })`
+- **Migrate**: `skill({ name: 'migrate' })`
+- **Transformer**: `skill({ name: 'transformer' })`
+- **Clerk-to-Clerk**: `skill({ name: 'clerk-migration' })`
+
+### Step 4: Execute task
+
+Follow the loaded skill's instructions to complete the user's request.
+
+### Step 5: Summarize
+
+After the task completes, summarize what was done:
+
+- Number of users exported/migrated
+- Any errors or warnings from the logs
+- Suggested next steps (e.g., "run `bun migrate`" after an export)
+
+
+$ARGUMENTS
+
diff --git a/.claude/skills/clerk-migration/SKILL.md b/.claude/skills/clerk-migration/SKILL.md
new file mode 100644
index 0000000..978bb45
--- /dev/null
+++ b/.claude/skills/clerk-migration/SKILL.md
@@ -0,0 +1,7 @@
+---
+name: clerk-migration
+description: Migrate users between Clerk instances (development to production or instance to instance). Use when user wants to move users from one Clerk instance to another.
+user-invocable: true
+---
+
+!`cat prompts/clerk-migration-prompt.md`
diff --git a/.claude/skills/export/SKILL.md b/.claude/skills/export/SKILL.md
new file mode 100644
index 0000000..ca87931
--- /dev/null
+++ b/.claude/skills/export/SKILL.md
@@ -0,0 +1,7 @@
+---
+name: export
+description: Export users from authentication platforms (Auth0, AuthJS, Better Auth, Clerk, Firebase, Supabase) to JSON. Use when user wants to export users from their current auth provider.
+user-invocable: true
+---
+
+!`cat prompts/export-prompt.md`
diff --git a/.cursor/rules b/.cursor/rules
deleted file mode 120000
index be77ac8..0000000
--- a/.cursor/rules
+++ /dev/null
@@ -1 +0,0 @@
-../AGENTS.md
\ No newline at end of file
diff --git a/.cursor/rules/agents.mdc b/.cursor/rules/agents.mdc
new file mode 100644
index 0000000..39de0ef
--- /dev/null
+++ b/.cursor/rules/agents.mdc
@@ -0,0 +1,7 @@
+---
+description: Project conventions and architecture for the Clerk user migration tool
+globs:
+alwaysApply: true
+---
+
+Read and follow the project instructions in `AGENTS.md` at the repository root. That file contains the project overview, structure, common commands, architecture, and implementation notes.
diff --git a/.cursor/rules/clerk-migration.mdc b/.cursor/rules/clerk-migration.mdc
new file mode 100644
index 0000000..d7403da
--- /dev/null
+++ b/.cursor/rules/clerk-migration.mdc
@@ -0,0 +1,13 @@
+---
+description: Migrate users between Clerk instances (development to production or instance to instance). Use when user wants to move users from one Clerk instance to another.
+globs:
+alwaysApply: false
+---
+
+Read and follow the instructions in `prompts/clerk-migration-prompt.md` for Clerk-to-Clerk migration.
+
+Typical flow:
+1. Verify environment and detect instance type
+2. Export from source instance (`bun export:clerk`)
+3. Switch `.env` to destination key
+4. Import into destination (`bun migrate -y --transformer clerk --file exports/clerk-export.json`)
diff --git a/.cursor/rules/export.mdc b/.cursor/rules/export.mdc
new file mode 100644
index 0000000..48ccb14
--- /dev/null
+++ b/.cursor/rules/export.mdc
@@ -0,0 +1,11 @@
+---
+description: Export users from authentication platforms (Auth0, AuthJS, Better Auth, Clerk, Firebase, Supabase) to JSON. Use when user wants to export users from their current auth provider.
+globs:
+alwaysApply: false
+---
+
+Read and follow the instructions in `prompts/export-prompt.md` for the complete export workflow.
+
+Key commands:
+- `bun export` — Interactive platform picker
+- `bun export:auth0` / `bun export:authjs` / `bun export:betterauth` / `bun export:clerk` / `bun export:firebase` / `bun export:supabase` — Direct export
diff --git a/.cursor/rules/migrate.mdc b/.cursor/rules/migrate.mdc
new file mode 100644
index 0000000..7bebf2d
--- /dev/null
+++ b/.cursor/rules/migrate.mdc
@@ -0,0 +1,11 @@
+---
+description: Run user migration to Clerk from various authentication platforms (Auth0, Supabase, Firebase, AuthJS, Clerk). Use when user wants to import, migrate, or load users from a data file (JSON/CSV).
+globs:
+alwaysApply: false
+---
+
+Read and follow the instructions in `prompts/migration-prompt.md` for the complete migration workflow.
+
+Key commands:
+- `bun migrate` — Interactive migration
+- `bun migrate -y -t <transformer> -f <file>` — Non-interactive migration
diff --git a/.cursor/rules/transformer.mdc b/.cursor/rules/transformer.mdc
new file mode 100644
index 0000000..5d516f6
--- /dev/null
+++ b/.cursor/rules/transformer.mdc
@@ -0,0 +1,13 @@
+---
+description: Generate custom Clerk user transformers from sample data. Use when user needs to create a new transformer for an unsupported platform or custom data format.
+globs:
+alwaysApply: false
+---
+
+Read and follow the instructions in `prompts/transformer-prompt.md` for transformer generation.
+
+Key steps:
+1. Analyze sample user data to identify field mappings
+2. Create transformer file in `src/transformers/`
+3. Register in `src/transformers/registry.ts`
+4. Run `bun run test` to verify
diff --git a/.env.example b/.env.example
index 217320f..3a385e5 100644
--- a/.env.example
+++ b/.env.example
@@ -1,10 +1,20 @@
# ============================================================================
# REQUIRED: Clerk Secret Key
# ============================================================================
-# Get your secret key from the Clerk Dashboard: https://dashboard.clerk.com
+# Get your secret key from the Clerk Dashboard: https://dashboard.clerk.com/~/api-keys
# Format: sk_test_... (development) or sk_live_... (production)
CLERK_SECRET_KEY=sk_
+# ============================================================================
+# OPTIONAL: Clerk Publishable Key
+# ============================================================================
+# Enables automatic Dashboard configuration checking during migration.
+# The tool will cross-reference your Clerk settings (enabled identifiers,
+# password policy, etc.) against the import file and warn about mismatches.
+# Format: pk_test_... (development) or pk_live_... (production)
+# CLERK_PUBLISHABLE_KEY=
+# NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=
+
# ============================================================================
# OPTIONAL: Rate Limit Override
# ============================================================================
@@ -14,7 +24,7 @@ CLERK_SECRET_KEY=sk_
# - Production (sk_live_*): 100 requests/second (Clerk limit: 1000 req/10s)
# - Development (sk_test_*): 10 requests/second (Clerk limit: 100 req/10s)
#
-# Only set this if you need to reduce the rate for safety or testing, or if
+# Only set this if you need to reduce the rate for safety or testing, or if you
# have a rate limit exception and can increase the speed
# Example: RATE_LIMIT=50
# RATE_LIMIT=
@@ -41,3 +51,59 @@ CLERK_SECRET_KEY=sk_
# - CONCURRENCY_LIMIT=5 (slower, ~50 req/s, very safe)
# CONCURRENCY_LIMIT=
+# ============================================================================
+# OPTIONAL: Supabase Export
+# ============================================================================
+# PostgreSQL connection string for exporting users from Supabase.
+# Find this in the Supabase Dashboard under Connect.
+# Format: postgresql://postgres:[PASSWORD]@db.[REF].supabase.co:5432/postgres
+# SUPABASE_DB_URL=
+
+# ============================================================================
+# OPTIONAL: Supabase OAuth Provider Detection
+# ============================================================================
+# Used during Supabase migration to cross-reference which OAuth providers
+# are enabled in Supabase and warn about unsupported providers in Clerk.
+# SUPABASE_URL=
+# NEXT_PUBLIC_SUPABASE_URL=
+# SUPABASE_ANON_KEY=
+# NEXT_PUBLIC_SUPABASE_ANON_KEY=
+# SUPABASE_SERVICE_ROLE_KEY=
+
+# ============================================================================
+# OPTIONAL: Auth0 Export
+# ============================================================================
+# Credentials for exporting users from Auth0 via the Management API.
+# Requires a Machine-to-Machine application with the read:users scope.
+# AUTH0_DOMAIN=my-tenant.us.auth0.com
+# AUTH0_CLIENT_ID=
+# AUTH0_CLIENT_SECRET=
+
+# ============================================================================
+# OPTIONAL: Firebase Export
+# ============================================================================
+# Path to Firebase service account JSON key file.
+# Download from Firebase Console → Project Settings → Service Accounts.
+# GOOGLE_APPLICATION_CREDENTIALS=/path/to/service-account.json
+
+# ============================================================================
+# OPTIONAL: AuthJS Export
+# ============================================================================
+# Database connection string for exporting users from an AuthJS database.
+# Supports PostgreSQL, MySQL, and SQLite.
+# Examples:
+# postgresql://user:password@host:5432/database
+# mysql://user:password@host:3306/database
+# /path/to/database.sqlite
+# AUTHJS_DB_URL=
+
+# ============================================================================
+# OPTIONAL: Better Auth Export
+# ============================================================================
+# Database connection string for exporting users from a Better Auth database.
+# Supports PostgreSQL, MySQL, and SQLite.
+# Examples:
+# postgresql://user:password@host:5432/database
+# mysql://user:password@host:3306/database
+# /path/to/database.sqlite
+# BETTER_AUTH_DB_URL=
diff --git a/.gitignore b/.gitignore
index 0d625e5..b958ee3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@ package-lock.json
yarn.lock
pnpm-lock.yaml
logs
+exports
test-convert-logs
tmp/
testing/
diff --git a/AGENTS.md b/AGENTS.md
index 11a91a8..f8aace9 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -13,6 +13,23 @@ src/
├── clean-logs/ # Log cleanup utility
├── convert-logs/ # NDJSON to JSON converter
├── delete/ # User deletion functionality
+├── export/ # User export (Auth0, AuthJS, Better Auth, Clerk, Firebase, Supabase)
+│ ├── index.ts # Entry point (platform dispatcher)
+│ ├── registry.ts # Export registry (array-based, like transformers)
+│ ├── auth0.ts # Auth0 export (Management API → JSON)
+│ ├── authjs.ts # AuthJS export (DB query → JSON, supports PG/MySQL/SQLite)
+│ ├── betterauth.ts # Better Auth export (DB query → JSON, supports PG/MySQL/SQLite)
+│ ├── clerk.ts # Clerk export (API → JSON)
+│ ├── firebase.ts # Firebase export (Admin SDK → JSON)
+│ └── supabase.ts # Supabase export (Postgres → JSON)
+├── lib/ # Shared utilities and helpers
+│ ├── index.ts # General utils (file paths, tryCatch, transformKeys, etc.)
+│ ├── db.ts # Database abstraction (PostgreSQL, MySQL, SQLite)
+│ ├── export.ts # Shared export utils (coverage display, file writing, DB error hints)
+│ ├── settings.ts # Settings persistence (loadSettings, saveSettings)
+│ ├── analysis.ts # User data analysis (analyzeFields, validateUsers)
+│ ├── supabase.ts # Supabase provider analysis (fetchSupabaseProviders, etc.)
+│ └── clerk.ts # Clerk API helpers (detectInstanceType, fetchClerkConfig)
├── migrate/ # Main migration logic
│ ├── cli.ts # Interactive CLI
│ ├── functions.ts # Data loading and transformation
@@ -20,16 +37,17 @@ src/
│ ├── index.ts # Entry point
│ └── validator.ts # Zod schema validation
├── transformers/ # Platform-specific transformers
+│ ├── index.ts # Re-exports from registry
+│ ├── registry.ts # Transformer registry (array-based, like exports)
│ ├── auth0.ts
│ ├── authjs.ts
+│ ├── betterauth.ts
│ ├── clerk.ts
│ ├── firebase.ts
-│ ├── supabase.ts
-│ └── index.ts
+│ └── supabase.ts
├── envs-constants.ts # Environment configuration
├── logger.ts # NDJSON logging
-├── types.ts # TypeScript types
-└── utils.ts # Shared utilities
+└── types.ts # TypeScript types
```
## Common Commands
@@ -37,6 +55,13 @@ src/
### Development Commands
- `bun migrate` - Start the migration process (interactive CLI)
+- `bun export` - Export users (interactive platform picker)
+- `bun export:auth0` - Export users from Auth0 tenant
+- `bun export:authjs` - Export users from AuthJS database (PostgreSQL, MySQL, or SQLite)
+- `bun export:betterauth` - Export users from Better Auth database (PostgreSQL, MySQL, or SQLite)
+- `bun export:clerk` - Export users from Clerk instance
+- `bun export:firebase` - Export users from Firebase project
+- `bun export:supabase` - Export users from Supabase database
- `bun delete` - Delete all migrated users (uses externalId to identify users)
- `bun clean-logs` - Remove all log files from the `./logs` folder
- `bun convert-logs` - Convert NDJSON log files to JSON array format for easier analysis
@@ -73,7 +98,7 @@ The migration tool uses a **transformer pattern** to support different source pl
1. **Field Transformer**: Maps source platform fields to Clerk's schema
- Example: Auth0's `_id.$oid` → Clerk's `userId`
- Example: Supabase's `encrypted_password` → Clerk's `password`
- - Handles nested field flattening (see `flattenObjectSelectively` in `src/migrate/functions.ts`)
+ - Handles nested field flattening (see `flattenObjectSelectively` in `src/lib/index.ts`)
2. **Optional Default Fields**: Applied to all users from that platform
- Example: Supabase defaults `passwordHasher` to `"bcrypt"`
@@ -88,10 +113,27 @@ The migration tool uses a **transformer pattern** to support different source pl
**Adding a new transformer**:
-1. Create a new file in `src/transformers/` with transformer config
-2. Export it in `src/transformers/index.ts`
+1. Create a new file in `src/transformers/` with a transformer config satisfying `TransformerRegistryEntry`
+2. Import and register it in `src/transformers/registry.ts`
3. The CLI will automatically include it in the platform selection
+### Export System
+
+The export tool (`src/export/`) exports users from various platforms to JSON files compatible with the migration tool.
+
+**Architecture**:
+
+- `registry.ts` — Registry of available exports (array-based, like transformers)
+- `index.ts` — CLI entry point / dispatcher (reads from registry)
+- `src/lib/export.ts` — Shared utilities (coverage display, file writing, DB error hints)
+- `[platform].ts` — Platform-specific export logic
+
+**Adding a new export**:
+
+1. Create `src/export/[platform].ts` with an export function, display summary, and CLI wrapper (`runXxxExport`)
+2. Register in `src/export/registry.ts`
+3. Add `"export:[platform]"` script to `package.json`
+
### Data Flow
```
@@ -148,7 +190,7 @@ The tool uses **p-limit for concurrency control** across all API calls.
- If a 429 occurs, uses Retry-After value from API response
- Falls back to 10 second default if Retry-After not available
-- Centralized in `getRetryDelay()` function in `src/utils.ts`
+- Centralized in `getRetryDelay()` function in `src/lib/index.ts`
- Automatically retries up to 5 times (configurable via MAX_RETRIES)
### Logging System
@@ -157,11 +199,13 @@ All operations create timestamped logs in `./logs/` using NDJSON (Newline-Delimi
- `{timestamp}-migration.log` - Combined log with all import entries
- `{timestamp}-user-deletion.log` - Combined log with all deletion entries
+- `{timestamp}-export.log` - Combined log with all export entries
**Log Entry Types** (defined in `src/types.ts`):
- `ImportLogEntry` - Success/error for user imports
- `DeleteLogEntry` - Success/error for user deletions
+- `ExportLogEntry` - Success/error for user exports
- `ValidationErrorPayload` - Validation failures with path and row
- `ErrorLog` - Additional identifier errors
@@ -169,7 +213,7 @@ All operations create timestamped logs in `./logs/` using NDJSON (Newline-Delimi
The codebase uses a consistent error handling pattern:
-- `tryCatch()` utility (in `src/utils.ts`) - Returns `[result, error]` (error is null on success)
+- `tryCatch()` utility (in `src/lib/index.ts`) - Returns `[result, error]` (error is null on success)
- Used extensively to make additional emails/phones non-fatal
- Rate limit errors (429) trigger automatic retry with delay
- Validation errors are logged but don't stop the migration
@@ -218,3 +262,4 @@ The tool auto-detects instance type from `CLERK_SECRET_KEY`:
- [docs/creating-transformers.md](docs/creating-transformers.md) - Transformer development guide
- [prompts/migration-prompt.md](prompts/migration-prompt.md) - AI prompt for running migrations
- [prompts/transformer-prompt.md](prompts/transformer-prompt.md) - AI prompt for generating transformers
+- [prompts/export-prompt.md](prompts/export-prompt.md) - AI prompt for exporting users
diff --git a/README.md b/README.md
index c8d273f..0e513bc 100644
--- a/README.md
+++ b/README.md
@@ -21,6 +21,7 @@ This repository contains a tool that takes a JSON file as input, containing a li
- [Creating Custom Transformers](docs/creating-transformers.md)
- [AI Migration Prompt](prompts/migration-prompt.md)
- [AI Transformer Generation Prompt](prompts/transformer-prompt.md)
+- [AI Export Prompt](prompts/export-prompt.md)
## Getting Started
@@ -75,7 +76,7 @@ bun migrate
If no key is found, the interactive CLI will prompt you to enter one and optionally save it to a `.env` file.
-You can find your secret key in the [Clerk Dashboard](https://dashboard.clerk.dev/) under **API Keys**.
+You can find your secret key in the [Clerk Dashboard](https://dashboard.clerk.com/~/api-keys) under **API Keys**.
### Run the tool
@@ -111,6 +112,7 @@ bun migrate [OPTIONS]
| `-f, --file <path>` | Path to the user data file (JSON or CSV) |
| `-r, --resume-after <userId>` | Resume migration after this user ID |
| `--require-password` | Only migrate users who have passwords (by default, users without passwords are migrated) |
+| `--skip-unsupported-providers` | Skip users whose only providers are not enabled in Clerk (Supabase only, no prompt) |
| `-y, --yes` | Non-interactive mode (skip all confirmations) |
| `-h, --help` | Show help message |
@@ -173,12 +175,112 @@ bun migrate -y \
## Exporting Users
-Some platforms require exporting users directly from their database before migrating. See the [Exporting Users](docs/exporting-users.md) guide for setup, CLI options, and troubleshooting.
+The tool supports exporting users from multiple platforms. Exported files are saved to the `exports/` directory.
```bash
+# Interactive platform picker
+bun export
+
+# Export directly from a specific platform
+bun export:auth0
+bun export:authjs
+bun export:betterauth
+bun export:clerk
+bun export:firebase
bun export:supabase
```
+### Auth0 Export
+
+Exports all users from your Auth0 tenant via the Management API. Requires a Machine-to-Machine application with the `read:users` scope.
+
+```bash
+bun export:auth0
+bun export:auth0 -- --domain my-tenant.us.auth0.com --client-id xxx --client-secret xxx
+bun export:auth0 -- --output my-users.json
+```
+
+You can set `AUTH0_DOMAIN`, `AUTH0_CLIENT_ID`, and `AUTH0_CLIENT_SECRET` in your `.env` file to avoid being prompted.
+
+**Note:** Password hashes are not available from the Auth0 Management API. Contact Auth0 support to request a password hash export if you need to migrate passwords.
+
+Output: `exports/auth0-export.json` (default)
+
+### AuthJS Export
+
+Exports all users from an AuthJS (Next-Auth) database. Supports PostgreSQL, MySQL, and SQLite databases.
+
+```bash
+bun export:authjs
+bun export:authjs -- --db-url postgresql://user:password@host:5432/database
+bun export:authjs -- --db-url mysql://user:password@host:3306/database
+bun export:authjs -- --db-url /path/to/database.sqlite
+bun export:authjs -- --output my-users.json
+```
+
+You can set `AUTHJS_DB_URL` in your `.env` file to avoid being prompted for the connection string.
+
+The export reads from the standard AuthJS Prisma adapter tables (`User` and `Account`). If the PascalCase table names are not found, it retries with lowercase names automatically.
+
+Output: `exports/authjs-export.json` (default)
+
+### Better Auth Export
+
+Exports all users from a Better Auth database to a JSON file. Supports PostgreSQL, MySQL, and SQLite databases. The export dynamically detects installed Better Auth plugins (username, phone number, admin, two-factor) and includes those columns when present.
+
+```bash
+bun export:betterauth
+bun export:betterauth -- --db-url postgresql://user:password@host:5432/database
+bun export:betterauth -- --db-url mysql://user:password@host:3306/database
+bun export:betterauth -- --db-url /path/to/database.sqlite
+bun export:betterauth -- --output my-users.json
+```
+
+You can set `BETTER_AUTH_DB_URL` in your `.env` file to avoid being prompted for the connection string.
+
+The export joins the `"user"` table with the `"account"` table (filtered to `providerId = 'credential'`) to include password hashes. If you customized table names in your Better Auth config, you'll need to update the table references in the export module.
+
+Output: `exports/betterauth-export.json` (default)
+
+### Clerk Export
+
+Exports all users from your Clerk instance to a JSON file. Requires `CLERK_SECRET_KEY` in your `.env` file or environment.
+
+```bash
+bun export:clerk
+bun export:clerk -- --output my-users.json
+```
+
+The export includes all fields available from the Clerk API: emails, phones, usernames, names, metadata, and account settings. Note that passwords, TOTP secrets, and backup codes are **not** available from the API — only `passwordEnabled`, `totpEnabled`, and `backupCodeEnabled` booleans are included in the field coverage report.
+
+Output: `exports/clerk-export.json` (default)
+
+### Firebase Export
+
+Exports all users from your Firebase project via the Admin SDK. Requires a service account JSON key file.
+
+```bash
+bun export:firebase
+bun export:firebase -- --service-account /path/to/service-account.json
+bun export:firebase -- --output my-users.json
+```
+
+You can set `GOOGLE_APPLICATION_CREDENTIALS` in your `.env` file to avoid being prompted for the service account path.
+
+The export includes password hashes and salts when available. Ensure you're using a project-level service account for full access.
+
+Output: `exports/firebase-export.json` (default)
+
+### Supabase Export
+
+See the [Exporting Users](docs/exporting-users.md) guide for Supabase setup, CLI options, and troubleshooting.
+
+```bash
+bun export:supabase
+```
+
+Output: `exports/supabase-export.json` (default)
+
## Migrating OAuth Connections
OAuth connections can not be directly migrated. The creation of the connection requires the user to consent, which can't happen on a migration like this. Instead you can rely on Clerk's [Account Linking](https://clerk.com/docs/guides/configure/auth-strategies/social-connections/account-linking) to handle this.
@@ -230,11 +332,14 @@ You could add a column in your user table inside of your database called `ClerkI
The tool can be configured through the following environment variables:
-| Variable | Description |
-| ------------------- | ------------------------------------------------------------------------- |
-| `CLERK_SECRET_KEY` | Your Clerk secret key |
-| `RATE_LIMIT` | Rate limit in requests/second (auto-configured: 100 for prod, 10 for dev) |
-| `CONCURRENCY_LIMIT` | Number of concurrent requests (auto-configured: ~9 for prod, ~1 for dev) |
+| Variable | Description |
+| ------------------------------- | ------------------------------------------------------------------------- |
+| `CLERK_SECRET_KEY` | Your Clerk secret key (required) |
+| `CLERK_PUBLISHABLE_KEY` | Clerk publishable key (enables automatic Dashboard config checking) |
+| `RATE_LIMIT` | Rate limit in requests/second (auto-configured: 100 for prod, 10 for dev) |
+| `CONCURRENCY_LIMIT` | Number of concurrent requests (auto-configured: ~9 for prod, ~1 for dev) |
+| `NEXT_PUBLIC_SUPABASE_URL` | Supabase project URL (enables OAuth provider cross-referencing) |
+| `NEXT_PUBLIC_SUPABASE_ANON_KEY` | Supabase anon key (enables OAuth provider cross-referencing) |
The tool automatically detects production vs development instances from your `CLERK_SECRET_KEY` and sets appropriate rate limits and concurrency:
diff --git a/bun.lock b/bun.lock
index 855b0f2..45de2fe 100644
--- a/bun.lock
+++ b/bun.lock
@@ -8,17 +8,22 @@
"@clerk/backend": "^2.29.3",
"@clerk/nextjs": "^6.37.3",
"@clerk/types": "^4.101.11",
+ "auth0": "^4.14.0",
+ "better-sqlite3": "^11.9.1",
"bun": "^1.3.6",
"csv-parser": "^3.2.0",
"dotenv": "16.6.1",
+ "firebase-admin": "^13.4.0",
"jose": "^6.1.3",
"mime-types": "^3.0.2",
+ "mysql2": "^3.14.1",
"p-limit": "^7.2.0",
"pg": "^8.18.0",
"picocolors": "^1.1.1",
"zod": "^4.3.5",
},
"devDependencies": {
+ "@types/better-sqlite3": "^7.6.13",
"@types/bun": "^1.3.6",
"@types/mime-types": "^3.0.1",
"@types/pg": "^8.16.0",
@@ -121,6 +126,40 @@
"@eslint/plugin-kit": ["@eslint/plugin-kit@0.4.1", "", { "dependencies": { "@eslint/core": "^0.17.0", "levn": "^0.4.1" } }, "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA=="],
+ "@fastify/busboy": ["@fastify/busboy@3.2.0", "", {}, "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA=="],
+
+ "@firebase/app-check-interop-types": ["@firebase/app-check-interop-types@0.3.3", "", {}, "sha512-gAlxfPLT2j8bTI/qfe3ahl2I2YcBQ8cFIBdhAQA4I2f3TndcO+22YizyGYuttLHPQEpWkhmpFW60VCFEPg4g5A=="],
+
+ "@firebase/app-types": ["@firebase/app-types@0.9.3", "", {}, "sha512-kRVpIl4vVGJ4baogMDINbyrIOtOxqhkZQg4jTq3l8Lw6WSk0xfpEYzezFu+Kl4ve4fbPl79dvwRtaFqAC/ucCw=="],
+
+ "@firebase/auth-interop-types": ["@firebase/auth-interop-types@0.2.4", "", {}, "sha512-JPgcXKCuO+CWqGDnigBtvo09HeBs5u/Ktc2GaFj2m01hLarbxthLNm7Fk8iOP1aqAtXV+fnnGj7U28xmk7IwVA=="],
+
+ "@firebase/component": ["@firebase/component@0.7.0", "", { "dependencies": { "@firebase/util": "1.13.0", "tslib": "^2.1.0" } }, "sha512-wR9En2A+WESUHexjmRHkqtaVH94WLNKt6rmeqZhSLBybg4Wyf0Umk04SZsS6sBq4102ZsDBFwoqMqJYj2IoDSg=="],
+
+ "@firebase/database": ["@firebase/database@1.1.0", "", { "dependencies": { "@firebase/app-check-interop-types": "0.3.3", "@firebase/auth-interop-types": "0.2.4", "@firebase/component": "0.7.0", "@firebase/logger": "0.5.0", "@firebase/util": "1.13.0", "faye-websocket": "0.11.4", "tslib": "^2.1.0" } }, "sha512-gM6MJFae3pTyNLoc9VcJNuaUDej0ctdjn3cVtILo3D5lpp0dmUHHLFN/pUKe7ImyeB1KAvRlEYxvIHNF04Filg=="],
+
+ "@firebase/database-compat": ["@firebase/database-compat@2.1.0", "", { "dependencies": { "@firebase/component": "0.7.0", "@firebase/database": "1.1.0", "@firebase/database-types": "1.0.16", "@firebase/logger": "0.5.0", "@firebase/util": "1.13.0", "tslib": "^2.1.0" } }, "sha512-8nYc43RqxScsePVd1qe1xxvWNf0OBnbwHxmXJ7MHSuuTVYFO3eLyLW3PiCKJ9fHnmIz4p4LbieXwz+qtr9PZDg=="],
+
+ "@firebase/database-types": ["@firebase/database-types@1.0.16", "", { "dependencies": { "@firebase/app-types": "0.9.3", "@firebase/util": "1.13.0" } }, "sha512-xkQLQfU5De7+SPhEGAXFBnDryUWhhlFXelEg2YeZOQMCdoe7dL64DDAd77SQsR+6uoXIZY5MB4y/inCs4GTfcw=="],
+
+ "@firebase/logger": ["@firebase/logger@0.5.0", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-cGskaAvkrnh42b3BA3doDWeBmuHFO/Mx5A83rbRDYakPjO9bJtRL3dX7javzc2Rr/JHZf4HlterTW2lUkfeN4g=="],
+
+ "@firebase/util": ["@firebase/util@1.13.0", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-0AZUyYUfpMNcztR5l09izHwXkZpghLgCUaAGjtMwXnCg3bj4ml5VgiwqOMOxJ+Nw4qN/zJAaOQBcJ7KGkWStqQ=="],
+
+ "@google-cloud/firestore": ["@google-cloud/firestore@7.11.6", "", { "dependencies": { "@opentelemetry/api": "^1.3.0", "fast-deep-equal": "^3.1.1", "functional-red-black-tree": "^1.0.1", "google-gax": "^4.3.3", "protobufjs": "^7.2.6" } }, "sha512-EW/O8ktzwLfyWBOsNuhRoMi8lrC3clHM5LVFhGvO1HCsLozCOOXRAlHrYBoE6HL42Sc8yYMuCb2XqcnJ4OOEpw=="],
+
+ "@google-cloud/paginator": ["@google-cloud/paginator@5.0.2", "", { "dependencies": { "arrify": "^2.0.0", "extend": "^3.0.2" } }, "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg=="],
+
+ "@google-cloud/projectify": ["@google-cloud/projectify@4.0.0", "", {}, "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA=="],
+
+ "@google-cloud/promisify": ["@google-cloud/promisify@4.0.0", "", {}, "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g=="],
+
+ "@google-cloud/storage": ["@google-cloud/storage@7.19.0", "", { "dependencies": { "@google-cloud/paginator": "^5.0.0", "@google-cloud/projectify": "^4.0.0", "@google-cloud/promisify": "<4.1.0", "abort-controller": "^3.0.0", "async-retry": "^1.3.3", "duplexify": "^4.1.3", "fast-xml-parser": "^5.3.4", "gaxios": "^6.0.2", "google-auth-library": "^9.6.3", "html-entities": "^2.5.2", "mime": "^3.0.0", "p-limit": "^3.0.1", "retry-request": "^7.0.0", "teeny-request": "^9.0.0", "uuid": "^8.0.0" } }, "sha512-n2FjE7NAOYyshogdc7KQOl/VZb4sneqPjWouSyia9CMDdMhRX5+RIbqalNmC7LOLzuLAN89VlF2HvG8na9G+zQ=="],
+
+ "@grpc/grpc-js": ["@grpc/grpc-js@1.14.3", "", { "dependencies": { "@grpc/proto-loader": "^0.8.0", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA=="],
+
+ "@grpc/proto-loader": ["@grpc/proto-loader@0.7.15", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.2.5", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ=="],
+
"@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="],
"@humanfs/node": ["@humanfs/node@0.16.7", "", { "dependencies": { "@humanfs/core": "^0.19.1", "@humanwhocodes/retry": "^0.4.0" } }, "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ=="],
@@ -181,6 +220,8 @@
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
+ "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="],
+
"@next/env": ["@next/env@16.1.6", "", {}, "sha512-N1ySLuZjnAtN3kFnwhAwPvZah8RJxKasD7x1f8shFqhncnWZn4JMfg37diLNuoHsLAlrDfM3g4mawVdtAG8XLQ=="],
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@16.1.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-wTzYulosJr/6nFnqGW7FrG3jfUUlEf8UjGA0/pyypJl42ExdVgC6xJgcXQ+V8QFn6niSG2Pb8+MIG1mZr2vczw=="],
@@ -199,6 +240,8 @@
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@16.1.6", "", { "os": "win32", "cpu": "x64" }, "sha512-NRfO39AIrzBnixKbjuo2YiYhB6o9d8v/ymU9m/Xk8cyVk+k7XylniXkHwjs4s70wedVffc6bQNbufk5v0xEm0A=="],
+ "@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
+
"@oven/bun-darwin-aarch64": ["@oven/bun-darwin-aarch64@1.3.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-27rypIapNkYboOSylkf1tD9UW9Ado2I+P1NBL46Qz29KmOjTL6WuJ7mHDC5O66CYxlOkF5r93NPDAC3lFHYBXw=="],
"@oven/bun-darwin-x64": ["@oven/bun-darwin-x64@1.3.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-I82xGzPkBxzBKgbl8DsA0RfMQCWTWjNmLjIEkW1ECiv3qK02kHGQ5FGUr/29L/SuvnGsULW4tBTRNZiMzL37nA=="],
@@ -223,6 +266,26 @@
"@pkgr/core": ["@pkgr/core@0.2.9", "", {}, "sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA=="],
+ "@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="],
+
+ "@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="],
+
+ "@protobufjs/codegen": ["@protobufjs/codegen@2.0.4", "", {}, "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="],
+
+ "@protobufjs/eventemitter": ["@protobufjs/eventemitter@1.1.0", "", {}, "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="],
+
+ "@protobufjs/fetch": ["@protobufjs/fetch@1.1.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" } }, "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ=="],
+
+ "@protobufjs/float": ["@protobufjs/float@1.0.2", "", {}, "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="],
+
+ "@protobufjs/inquire": ["@protobufjs/inquire@1.1.0", "", {}, "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="],
+
+ "@protobufjs/path": ["@protobufjs/path@1.1.2", "", {}, "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="],
+
+ "@protobufjs/pool": ["@protobufjs/pool@1.1.0", "", {}, "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="],
+
+ "@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="],
+
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.55.1", "", { "os": "android", "cpu": "arm" }, "sha512-9R0DM/ykwfGIlNu6+2U09ga0WXeZ9MRC2Ter8jnz8415VbuIykVuc6bhdrbORFZANDmTDvq26mJrEVTl8TdnDg=="],
"@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.55.1", "", { "os": "android", "cpu": "arm64" }, "sha512-eFZCb1YUqhTysgW3sj/55du5cG57S7UTNtdMjCW7LwVcj3dTTcowCsC8p7uBdzKsZYa8J7IDE8lhMI+HX1vQvg=="],
@@ -279,8 +342,14 @@
"@swc/helpers": ["@swc/helpers@0.5.15", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g=="],
+ "@tootallnate/once": ["@tootallnate/once@2.0.0", "", {}, "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A=="],
+
+ "@types/better-sqlite3": ["@types/better-sqlite3@7.6.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA=="],
+
"@types/bun": ["@types/bun@1.3.6", "", { "dependencies": { "bun-types": "1.3.6" } }, "sha512-uWCv6FO/8LcpREhenN1d1b6fcspAB+cefwD7uti8C8VffIv0Um08TKMn98FynpTiU38+y2dUO55T11NgDt8VAA=="],
+ "@types/caseless": ["@types/caseless@0.12.5", "", {}, "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg=="],
+
"@types/chai": ["@types/chai@5.2.3", "", { "dependencies": { "@types/deep-eql": "*", "assertion-error": "^2.0.1" } }, "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA=="],
"@types/deep-eql": ["@types/deep-eql@4.0.2", "", {}, "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw=="],
@@ -289,12 +358,22 @@
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
+ "@types/jsonwebtoken": ["@types/jsonwebtoken@9.0.10", "", { "dependencies": { "@types/ms": "*", "@types/node": "*" } }, "sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA=="],
+
+ "@types/long": ["@types/long@4.0.2", "", {}, "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA=="],
+
"@types/mime-types": ["@types/mime-types@3.0.1", "", {}, "sha512-xRMsfuQbnRq1Ef+C+RKaENOxXX87Ygl38W1vDfPHRku02TgQr+Qd8iivLtAMcR0KF5/29xlnFihkTlbqFrGOVQ=="],
+ "@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
+
"@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="],
"@types/pg": ["@types/pg@8.16.0", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ=="],
+ "@types/request": ["@types/request@2.48.13", "", { "dependencies": { "@types/caseless": "*", "@types/node": "*", "@types/tough-cookie": "*", "form-data": "^2.5.5" } }, "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg=="],
+
+ "@types/tough-cookie": ["@types/tough-cookie@4.0.5", "", {}, "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA=="],
+
"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.53.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.12.2", "@typescript-eslint/scope-manager": "8.53.1", "@typescript-eslint/type-utils": "8.53.1", "@typescript-eslint/utils": "8.53.1", "@typescript-eslint/visitor-keys": "8.53.1", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.53.1", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-cFYYFZ+oQFi6hUnBTbLRXfTJiaQtYE3t4O692agbBl+2Zy+eqSKWtPjhPXJu1G7j4RLjKgeJPDdq3EqOwmX5Ag=="],
"@typescript-eslint/parser": ["@typescript-eslint/parser@8.53.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.53.1", "@typescript-eslint/types": "8.53.1", "@typescript-eslint/typescript-estree": "8.53.1", "@typescript-eslint/visitor-keys": "8.53.1", "debug": "^4.4.3" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } }, "sha512-nm3cvFN9SqZGXjmw5bZ6cGmvJSyJPn0wU9gHAZZHDnZl2wF9PhHv78Xf06E0MaNk4zLVHL8hb2/c32XvyJOLQg=="],
@@ -329,10 +408,14 @@
"@vitest/utils": ["@vitest/utils@4.0.17", "", { "dependencies": { "@vitest/pretty-format": "4.0.17", "tinyrainbow": "^3.0.3" } }, "sha512-RG6iy+IzQpa9SB8HAFHJ9Y+pTzI+h8553MrciN9eC6TFBErqrQaTas4vG+MVj8S4uKk8uTT2p0vgZPnTdxd96w=="],
+ "abort-controller": ["abort-controller@3.0.0", "", { "dependencies": { "event-target-shim": "^5.0.0" } }, "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg=="],
+
"acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="],
"acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="],
+ "agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="],
+
"ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="],
"ansi-escapes": ["ansi-escapes@7.2.0", "", { "dependencies": { "environment": "^1.0.0" } }, "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw=="],
@@ -343,20 +426,46 @@
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
+ "arrify": ["arrify@2.0.1", "", {}, "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug=="],
+
"assertion-error": ["assertion-error@2.0.1", "", {}, "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA=="],
+ "async-retry": ["async-retry@1.3.3", "", { "dependencies": { "retry": "0.13.1" } }, "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw=="],
+
+ "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
+
+ "auth0": ["auth0@4.37.0", "", { "dependencies": { "jose": "^4.13.2", "undici-types": "^6.15.0", "uuid": "^9.0.0" } }, "sha512-+TqJRxh4QvbD4TQIYx1ak2vanykQkG/nIZLuR6o8LoQj425gjVG3tFuUbbOeh/nCpP1rnvU0CCV1ChZHYXLU/A=="],
+
+ "aws-ssl-profiles": ["aws-ssl-profiles@1.1.2", "", {}, "sha512-NZKeq9AfyQvEeNlN0zSYAaWrmBffJh3IELMZfRpJVWgrpEbtEpnjvzqBPf+mxoI287JohRDoa+/nsfqqiZmF6g=="],
+
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
+ "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
+
"baseline-browser-mapping": ["baseline-browser-mapping@2.9.19", "", { "bin": { "baseline-browser-mapping": "dist/cli.js" } }, "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg=="],
+ "better-sqlite3": ["better-sqlite3@11.10.0", "", { "dependencies": { "bindings": "^1.5.0", "prebuild-install": "^7.1.1" } }, "sha512-EwhOpyXiOEL/lKzHz9AW1msWFNzGc/z+LzeB3/jnFJpxu+th2yqvzsSWas1v9jgs9+xiXJcD5A8CJxAG2TaghQ=="],
+
+ "bignumber.js": ["bignumber.js@9.3.1", "", {}, "sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ=="],
+
+ "bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="],
+
+ "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
+
"brace-expansion": ["brace-expansion@1.1.12", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="],
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
+ "buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
+
+ "buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],
+
"bun": ["bun@1.3.6", "", { "optionalDependencies": { "@oven/bun-darwin-aarch64": "1.3.6", "@oven/bun-darwin-x64": "1.3.6", "@oven/bun-darwin-x64-baseline": "1.3.6", "@oven/bun-linux-aarch64": "1.3.6", "@oven/bun-linux-aarch64-musl": "1.3.6", "@oven/bun-linux-x64": "1.3.6", "@oven/bun-linux-x64-baseline": "1.3.6", "@oven/bun-linux-x64-musl": "1.3.6", "@oven/bun-linux-x64-musl-baseline": "1.3.6", "@oven/bun-windows-x64": "1.3.6", "@oven/bun-windows-x64-baseline": "1.3.6" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "x64", "arm64", ], "bin": { "bun": "bin/bun.exe", "bunx": "bin/bunx.exe" } }, "sha512-Tn98GlZVN2WM7+lg/uGn5DzUao37Yc0PUz7yzYHdeF5hd+SmHQGbCUIKE4Sspdgtxn49LunK3mDNBC2Qn6GJjw=="],
"bun-types": ["bun-types@1.3.6", "", { "dependencies": { "@types/node": "*" } }, "sha512-OlFwHcnNV99r//9v5IIOgQ9Uk37gZqrNMCcqEaExdkVq3Avwqok1bJFmvGMCkCE0FqzdY8VMOZpfpR3lwI+CsQ=="],
+ "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
+
"callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="],
"caniuse-lite": ["caniuse-lite@1.0.30001769", "", {}, "sha512-BCfFL1sHijQlBGWBMuJyhZUhzo7wer5sVj9hqekB/7xn0Ypy+pER/edCYQm4exbXj4WiySGp40P8UuTh6w1srg=="],
@@ -365,18 +474,24 @@
"chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="],
+ "chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="],
+
"cli-cursor": ["cli-cursor@5.0.0", "", { "dependencies": { "restore-cursor": "^5.0.0" } }, "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw=="],
"cli-truncate": ["cli-truncate@5.1.1", "", { "dependencies": { "slice-ansi": "^7.1.0", "string-width": "^8.0.0" } }, "sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A=="],
"client-only": ["client-only@0.0.1", "", {}, "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="],
+ "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="],
+
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
"colorette": ["colorette@2.0.20", "", {}, "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="],
+ "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
+
"commander": ["commander@14.0.2", "", {}, "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ=="],
"concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],
@@ -389,22 +504,48 @@
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
+ "decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="],
+
+ "deep-extend": ["deep-extend@0.6.0", "", {}, "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="],
+
"deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="],
+ "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
+
+ "denque": ["denque@2.1.0", "", {}, "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw=="],
+
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="],
+ "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
+
+ "duplexify": ["duplexify@4.1.3", "", { "dependencies": { "end-of-stream": "^1.4.1", "inherits": "^2.0.3", "readable-stream": "^3.1.1", "stream-shift": "^1.0.2" } }, "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA=="],
+
+ "ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="],
+
"emoji-regex": ["emoji-regex@10.6.0", "", {}, "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="],
+ "end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="],
+
"environment": ["environment@1.1.0", "", {}, "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q=="],
+ "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
+
+ "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
+
"es-module-lexer": ["es-module-lexer@1.7.0", "", {}, "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA=="],
+ "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
+
+ "es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="],
+
"esbuild": ["esbuild@0.27.2", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.2", "@esbuild/android-arm": "0.27.2", "@esbuild/android-arm64": "0.27.2", "@esbuild/android-x64": "0.27.2", "@esbuild/darwin-arm64": "0.27.2", "@esbuild/darwin-x64": "0.27.2", "@esbuild/freebsd-arm64": "0.27.2", "@esbuild/freebsd-x64": "0.27.2", "@esbuild/linux-arm": "0.27.2", "@esbuild/linux-arm64": "0.27.2", "@esbuild/linux-ia32": "0.27.2", "@esbuild/linux-loong64": "0.27.2", "@esbuild/linux-mips64el": "0.27.2", "@esbuild/linux-ppc64": "0.27.2", "@esbuild/linux-riscv64": "0.27.2", "@esbuild/linux-s390x": "0.27.2", "@esbuild/linux-x64": "0.27.2", "@esbuild/netbsd-arm64": "0.27.2", "@esbuild/netbsd-x64": "0.27.2", "@esbuild/openbsd-arm64": "0.27.2", "@esbuild/openbsd-x64": "0.27.2", "@esbuild/openharmony-arm64": "0.27.2", "@esbuild/sunos-x64": "0.27.2", "@esbuild/win32-arm64": "0.27.2", "@esbuild/win32-ia32": "0.27.2", "@esbuild/win32-x64": "0.27.2" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw=="],
+ "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
+
"escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="],
"eslint": ["eslint@9.39.2", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.21.1", "@eslint/config-helpers": "^0.4.2", "@eslint/core": "^0.17.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.39.2", "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.4.0", "eslint-visitor-keys": "^4.2.1", "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw=="],
@@ -429,10 +570,18 @@
"esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="],
+ "event-target-shim": ["event-target-shim@5.0.1", "", {}, "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="],
+
"eventemitter3": ["eventemitter3@5.0.4", "", {}, "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw=="],
+ "expand-template": ["expand-template@2.0.3", "", {}, "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg=="],
+
"expect-type": ["expect-type@1.3.0", "", {}, "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA=="],
+ "extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="],
+
+ "farmhash-modern": ["farmhash-modern@1.1.0", "", {}, "sha512-6ypT4XfgqJk/F3Yuv4SX26I3doUjt0GTG4a+JgWxXQpxXzTBq8fPUeGHfcYMMDPHJHm3yPOSjaeBwBGAHWXCdA=="],
+
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
"fast-diff": ["fast-diff@1.3.0", "", {}, "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw=="],
@@ -443,38 +592,100 @@
"fast-sha256": ["fast-sha256@1.3.0", "", {}, "sha512-n11RGP/lrWEFI/bWdygLxhI+pVeo1ZYIVwvvPkW7azl/rOy+F3HYRZ2K5zeE9mmkhQppyv9sQFx0JM9UabnpPQ=="],
+ "fast-xml-parser": ["fast-xml-parser@5.3.7", "", { "dependencies": { "strnum": "^2.1.2" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-JzVLro9NQv92pOM/jTCR6mHlJh2FGwtomH8ZQjhFj/R29P2Fnj38OgPJVtcvYw6SuKClhgYuwUZf5b3rd8u2mA=="],
+
+ "faye-websocket": ["faye-websocket@0.11.4", "", { "dependencies": { "websocket-driver": ">=0.5.1" } }, "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g=="],
+
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
"file-entry-cache": ["file-entry-cache@8.0.0", "", { "dependencies": { "flat-cache": "^4.0.0" } }, "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="],
+ "file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="],
+
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
"find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="],
+ "firebase-admin": ["firebase-admin@13.6.1", "", { "dependencies": { "@fastify/busboy": "^3.0.0", "@firebase/database-compat": "^2.0.0", "@firebase/database-types": "^1.0.6", "@types/node": "^22.8.7", "farmhash-modern": "^1.1.0", "fast-deep-equal": "^3.1.1", "google-auth-library": "^9.14.2", "jsonwebtoken": "^9.0.0", "jwks-rsa": "^3.1.0", "node-forge": "^1.3.1", "uuid": "^11.0.2" }, "optionalDependencies": { "@google-cloud/firestore": "^7.11.0", "@google-cloud/storage": "^7.14.0" } }, "sha512-Zgc6yPtmPxAZo+FoK6LMG6zpSEsoSK8ifIR+IqF4oWuC3uWZU40OjxgfLTSFcsRlj/k/wD66zNv2UiTRreCNSw=="],
+
"flat-cache": ["flat-cache@4.0.1", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" } }, "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="],
"flatted": ["flatted@3.3.3", "", {}, "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="],
+ "form-data": ["form-data@2.5.5", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.35", "safe-buffer": "^5.2.1" } }, "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A=="],
+
+ "fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="],
+
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
+ "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
+
+ "functional-red-black-tree": ["functional-red-black-tree@1.0.1", "", {}, "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g=="],
+
+ "gaxios": ["gaxios@6.7.1", "", { "dependencies": { "extend": "^3.0.2", "https-proxy-agent": "^7.0.1", "is-stream": "^2.0.0", "node-fetch": "^2.6.9", "uuid": "^9.0.1" } }, "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ=="],
+
+ "gcp-metadata": ["gcp-metadata@6.1.1", "", { "dependencies": { "gaxios": "^6.1.1", "google-logging-utils": "^0.0.2", "json-bigint": "^1.0.0" } }, "sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A=="],
+
+ "generate-function": ["generate-function@2.3.1", "", { "dependencies": { "is-property": "^1.0.2" } }, "sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ=="],
+
+ "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="],
+
"get-east-asian-width": ["get-east-asian-width@1.4.0", "", {}, "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q=="],
+ "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="],
+
+ "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
+
+ "github-from-package": ["github-from-package@0.0.0", "", {}, "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="],
+
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
"glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="],
"globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="],
+ "google-auth-library": ["google-auth-library@9.15.1", "", { "dependencies": { "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", "gaxios": "^6.1.1", "gcp-metadata": "^6.1.0", "gtoken": "^7.0.0", "jws": "^4.0.0" } }, "sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng=="],
+
+ "google-gax": ["google-gax@4.6.1", "", { "dependencies": { "@grpc/grpc-js": "^1.10.9", "@grpc/proto-loader": "^0.7.13", "@types/long": "^4.0.0", "abort-controller": "^3.0.0", "duplexify": "^4.0.0", "google-auth-library": "^9.3.0", "node-fetch": "^2.7.0", "object-hash": "^3.0.0", "proto3-json-serializer": "^2.0.2", "protobufjs": "^7.3.2", "retry-request": "^7.0.0", "uuid": "^9.0.1" } }, "sha512-V6eky/xz2mcKfAd1Ioxyd6nmA61gao3n01C+YeuIwu3vzM9EDR6wcVzMSIbLMDXWeoi9SHYctXuKYC5uJUT3eQ=="],
+
+ "google-logging-utils": ["google-logging-utils@0.0.2", "", {}, "sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ=="],
+
+ "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
+
+ "gtoken": ["gtoken@7.1.0", "", { "dependencies": { "gaxios": "^6.0.0", "jws": "^4.0.0" } }, "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw=="],
+
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
+ "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
+
+ "has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="],
+
+ "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
+
+ "html-entities": ["html-entities@2.6.0", "", {}, "sha512-kig+rMn/QOVRvr7c86gQ8lWXq+Hkv6CbAH1hLu+RG338StTpE8Z0b44SDVaqVu7HGKf27frdmUYEs9hTUX/cLQ=="],
+
+ "http-parser-js": ["http-parser-js@0.5.10", "", {}, "sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA=="],
+
+ "http-proxy-agent": ["http-proxy-agent@5.0.0", "", { "dependencies": { "@tootallnate/once": "2", "agent-base": "6", "debug": "4" } }, "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w=="],
+
+ "https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="],
+
"husky": ["husky@9.1.7", "", { "bin": { "husky": "bin.js" } }, "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA=="],
+ "iconv-lite": ["iconv-lite@0.7.2", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw=="],
+
+ "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
+
"ignore": ["ignore@7.0.5", "", {}, "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="],
"import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="],
"imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="],
+ "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
+
+ "ini": ["ini@1.3.8", "", {}, "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="],
+
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
"is-fullwidth-code-point": ["is-fullwidth-code-point@5.1.0", "", { "dependencies": { "get-east-asian-width": "^1.3.1" } }, "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ=="],
@@ -483,6 +694,10 @@
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
+ "is-property": ["is-property@1.0.2", "", {}, "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g=="],
+
+ "is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],
+
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
"jose": ["jose@6.1.3", "", {}, "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ=="],
@@ -493,52 +708,116 @@
"js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="],
+ "json-bigint": ["json-bigint@1.0.0", "", { "dependencies": { "bignumber.js": "^9.0.0" } }, "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ=="],
+
"json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="],
"json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
"json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="],
+ "jsonwebtoken": ["jsonwebtoken@9.0.3", "", { "dependencies": { "jws": "^4.0.1", "lodash.includes": "^4.3.0", "lodash.isboolean": "^3.0.3", "lodash.isinteger": "^4.0.4", "lodash.isnumber": "^3.0.3", "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1", "lodash.once": "^4.0.0", "ms": "^2.1.1", "semver": "^7.5.4" } }, "sha512-MT/xP0CrubFRNLNKvxJ2BYfy53Zkm++5bX9dtuPbqAeQpTVe0MQTFhao8+Cp//EmJp244xt6Drw/GVEGCUj40g=="],
+
+ "jwa": ["jwa@2.0.1", "", { "dependencies": { "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg=="],
+
+ "jwks-rsa": ["jwks-rsa@3.2.2", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.4", "debug": "^4.3.4", "jose": "^4.15.4", "limiter": "^1.1.5", "lru-memoizer": "^2.2.0" } }, "sha512-BqTyEDV+lS8F2trk3A+qJnxV5Q9EqKCBJOPti3W97r7qTympCZjb7h2X6f2kc+0K3rsSTY1/6YG2eaXKoj497w=="],
+
+ "jws": ["jws@4.0.1", "", { "dependencies": { "jwa": "^2.0.1", "safe-buffer": "^5.0.1" } }, "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA=="],
+
"keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="],
"levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="],
+ "limiter": ["limiter@1.1.5", "", {}, "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA=="],
+
"lint-staged": ["lint-staged@16.2.7", "", { "dependencies": { "commander": "^14.0.2", "listr2": "^9.0.5", "micromatch": "^4.0.8", "nano-spawn": "^2.0.0", "pidtree": "^0.6.0", "string-argv": "^0.3.2", "yaml": "^2.8.1" }, "bin": { "lint-staged": "bin/lint-staged.js" } }, "sha512-lDIj4RnYmK7/kXMya+qJsmkRFkGolciXjrsZ6PC25GdTfWOAWetR0ZbsNXRAj1EHHImRSalc+whZFg56F5DVow=="],
"listr2": ["listr2@9.0.5", "", { "dependencies": { "cli-truncate": "^5.0.0", "colorette": "^2.0.20", "eventemitter3": "^5.0.1", "log-update": "^6.1.0", "rfdc": "^1.4.1", "wrap-ansi": "^9.0.0" } }, "sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g=="],
"locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="],
+ "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="],
+
+ "lodash.clonedeep": ["lodash.clonedeep@4.5.0", "", {}, "sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ=="],
+
+ "lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
+
+ "lodash.isboolean": ["lodash.isboolean@3.0.3", "", {}, "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="],
+
+ "lodash.isinteger": ["lodash.isinteger@4.0.4", "", {}, "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="],
+
+ "lodash.isnumber": ["lodash.isnumber@3.0.3", "", {}, "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="],
+
+ "lodash.isplainobject": ["lodash.isplainobject@4.0.6", "", {}, "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="],
+
+ "lodash.isstring": ["lodash.isstring@4.0.1", "", {}, "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="],
+
"lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="],
+ "lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="],
+
"log-update": ["log-update@6.1.0", "", { "dependencies": { "ansi-escapes": "^7.0.0", "cli-cursor": "^5.0.0", "slice-ansi": "^7.1.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" } }, "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w=="],
+ "long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="],
+
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
+ "lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="],
+
+ "lru-memoizer": ["lru-memoizer@2.3.0", "", { "dependencies": { "lodash.clonedeep": "^4.5.0", "lru-cache": "6.0.0" } }, "sha512-GXn7gyHAMhO13WSKrIiNfztwxodVsP8IoZ3XfrJV4yH2x0/OeTO/FIaAHTY5YekdGgW94njfuKmyyt1E0mR6Ug=="],
+
+ "lru.min": ["lru.min@1.1.4", "", {}, "sha512-DqC6n3QQ77zdFpCMASA1a3Jlb64Hv2N2DciFGkO/4L9+q/IpIAuRlKOvCXabtRW6cQf8usbmM6BE/TOPysCdIA=="],
+
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
+ "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
+
"micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
+ "mime": ["mime@3.0.0", "", { "bin": { "mime": "cli.js" } }, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="],
+
"mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="],
"mime-types": ["mime-types@3.0.2", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A=="],
"mimic-function": ["mimic-function@5.0.1", "", {}, "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA=="],
+ "mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="],
+
"minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],
+ "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
+
+ "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="],
+
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
+ "mysql2": ["mysql2@3.17.4", "", { "dependencies": { "aws-ssl-profiles": "^1.1.2", "denque": "^2.1.0", "generate-function": "^2.3.1", "iconv-lite": "^0.7.2", "long": "^5.3.2", "lru.min": "^1.1.4", "named-placeholders": "^1.1.6", "sql-escaper": "^1.3.3" } }, "sha512-RnfuK5tyIuaiPMWOCTTl4vQX/mQXqSA8eoIbwvWccadvPGvh+BYWWVecInMS5s7wcLUkze8LqJzwB/+A4uwuAA=="],
+
+ "named-placeholders": ["named-placeholders@1.1.6", "", { "dependencies": { "lru.min": "^1.1.0" } }, "sha512-Tz09sEL2EEuv5fFowm419c1+a/jSMiBjI9gHxVLrVdbUkkNUUfjsVYs9pVZu5oCon/kmRh9TfLEObFtkVxmY0w=="],
+
"nano-spawn": ["nano-spawn@2.0.0", "", {}, "sha512-tacvGzUY5o2D8CBh2rrwxyNojUsZNU2zjNTzKQrkgGJQTbGAfArVWXSKMBokBeeg6C7OLRGUEyoFlYbfeWQIqw=="],
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
+ "napi-build-utils": ["napi-build-utils@2.0.0", "", {}, "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA=="],
+
"natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="],
"next": ["next@16.1.6", "", { "dependencies": { "@next/env": "16.1.6", "@swc/helpers": "0.5.15", "baseline-browser-mapping": "^2.8.3", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "16.1.6", "@next/swc-darwin-x64": "16.1.6", "@next/swc-linux-arm64-gnu": "16.1.6", "@next/swc-linux-arm64-musl": "16.1.6", "@next/swc-linux-x64-gnu": "16.1.6", "@next/swc-linux-x64-musl": "16.1.6", "@next/swc-win32-arm64-msvc": "16.1.6", "@next/swc-win32-x64-msvc": "16.1.6", "sharp": "^0.34.4" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-hkyRkcu5x/41KoqnROkfTm2pZVbKxvbZRuNvKXLRXxs3VfyO0WhY50TQS40EuKO9SW3rBj/sF3WbVwDACeMZyw=="],
+ "node-abi": ["node-abi@3.87.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-+CGM1L1CgmtheLcBuleyYOn7NWPVu0s0EJH2C4puxgEZb9h8QpR9G2dBfZJOAUhi7VQxuBPMd0hiISWcTyiYyQ=="],
+
+ "node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
+
+ "node-forge": ["node-forge@1.3.3", "", {}, "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg=="],
+
+ "object-hash": ["object-hash@3.0.0", "", {}, "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw=="],
+
"obug": ["obug@2.1.1", "", {}, "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ=="],
+ "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
+
"onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="],
"optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="],
@@ -587,26 +866,48 @@
"postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="],
+ "prebuild-install": ["prebuild-install@7.1.3", "", { "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^2.0.0", "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" }, "bin": { "prebuild-install": "bin.js" } }, "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug=="],
+
"prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="],
"prettier": ["prettier@3.8.0", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-yEPsovQfpxYfgWNhCfECjG5AQaO+K3dp6XERmOepyPDVqcJm+bjyCVO3pmU+nAPe0N5dDvekfGezt/EIiRe1TA=="],
"prettier-linter-helpers": ["prettier-linter-helpers@1.0.1", "", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-SxToR7P8Y2lWmv/kTzVLC1t/GDI2WGjMwNhLLE9qtH8Q13C+aEmuRlzDst4Up4s0Wc8sF2M+J57iB3cMLqftfg=="],
+ "proto3-json-serializer": ["proto3-json-serializer@2.0.2", "", { "dependencies": { "protobufjs": "^7.2.5" } }, "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ=="],
+
+ "protobufjs": ["protobufjs@7.5.4", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg=="],
+
+ "pump": ["pump@3.0.3", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA=="],
+
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
+ "rc": ["rc@1.2.8", "", { "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" }, "bin": { "rc": "./cli.js" } }, "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw=="],
+
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
"react-dom": ["react-dom@19.2.4", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.4" } }, "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ=="],
+ "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
+
+ "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],
+
"resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="],
"restore-cursor": ["restore-cursor@5.1.0", "", { "dependencies": { "onetime": "^7.0.0", "signal-exit": "^4.1.0" } }, "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="],
+ "retry": ["retry@0.13.1", "", {}, "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg=="],
+
+ "retry-request": ["retry-request@7.0.2", "", { "dependencies": { "@types/request": "^2.48.8", "extend": "^3.0.2", "teeny-request": "^9.0.0" } }, "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w=="],
+
"rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="],
"rollup": ["rollup@4.55.1", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.55.1", "@rollup/rollup-android-arm64": "4.55.1", "@rollup/rollup-darwin-arm64": "4.55.1", "@rollup/rollup-darwin-x64": "4.55.1", "@rollup/rollup-freebsd-arm64": "4.55.1", "@rollup/rollup-freebsd-x64": "4.55.1", "@rollup/rollup-linux-arm-gnueabihf": "4.55.1", "@rollup/rollup-linux-arm-musleabihf": "4.55.1", "@rollup/rollup-linux-arm64-gnu": "4.55.1", "@rollup/rollup-linux-arm64-musl": "4.55.1", "@rollup/rollup-linux-loong64-gnu": "4.55.1", "@rollup/rollup-linux-loong64-musl": "4.55.1", "@rollup/rollup-linux-ppc64-gnu": "4.55.1", "@rollup/rollup-linux-ppc64-musl": "4.55.1", "@rollup/rollup-linux-riscv64-gnu": "4.55.1", "@rollup/rollup-linux-riscv64-musl": "4.55.1", "@rollup/rollup-linux-s390x-gnu": "4.55.1", "@rollup/rollup-linux-x64-gnu": "4.55.1", "@rollup/rollup-linux-x64-musl": "4.55.1", "@rollup/rollup-openbsd-x64": "4.55.1", "@rollup/rollup-openharmony-arm64": "4.55.1", "@rollup/rollup-win32-arm64-msvc": "4.55.1", "@rollup/rollup-win32-ia32-msvc": "4.55.1", "@rollup/rollup-win32-x64-gnu": "4.55.1", "@rollup/rollup-win32-x64-msvc": "4.55.1", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A=="],
+ "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
+
+ "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
+
"scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
@@ -623,6 +924,10 @@
"signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="],
+ "simple-concat": ["simple-concat@1.0.1", "", {}, "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="],
+
+ "simple-get": ["simple-get@4.0.1", "", { "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA=="],
+
"sisteransi": ["sisteransi@1.0.5", "", {}, "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg=="],
"slice-ansi": ["slice-ansi@7.1.2", "", { "dependencies": { "ansi-styles": "^6.2.1", "is-fullwidth-code-point": "^5.0.0" } }, "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w=="],
@@ -631,20 +936,32 @@
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
+ "sql-escaper": ["sql-escaper@1.3.3", "", {}, "sha512-BsTCV265VpTp8tm1wyIm1xqQCS+Q9NHx2Sr+WcnUrgLrQ6yiDIvHYJV5gHxsj1lMBy2zm5twLaZao8Jd+S8JJw=="],
+
"stackback": ["stackback@0.0.2", "", {}, "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw=="],
"standardwebhooks": ["standardwebhooks@1.0.0", "", { "dependencies": { "@stablelib/base64": "^1.0.0", "fast-sha256": "^1.3.0" } }, "sha512-BbHGOQK9olHPMvQNHWul6MYlrRTAOKn03rOe4A8O3CLWhNf4YHBqq2HJKKC+sfqpxiBY52pNeesD6jIiLDz8jg=="],
"std-env": ["std-env@3.10.0", "", {}, "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg=="],
+ "stream-events": ["stream-events@1.0.5", "", { "dependencies": { "stubs": "^3.0.0" } }, "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg=="],
+
+ "stream-shift": ["stream-shift@1.0.3", "", {}, "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ=="],
+
"string-argv": ["string-argv@0.3.2", "", {}, "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q=="],
"string-width": ["string-width@8.1.0", "", { "dependencies": { "get-east-asian-width": "^1.3.0", "strip-ansi": "^7.1.0" } }, "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg=="],
+ "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
+
"strip-ansi": ["strip-ansi@7.1.2", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA=="],
"strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="],
+ "strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
+
+ "stubs": ["stubs@3.0.0", "", {}, "sha512-PdHt7hHUJKxvTCgbKX9C1V/ftOcjJQgz8BZwNfV5c4B6dcGqlpelTbJ999jBGZ2jYiPAwcX5dP6oBwVlBlUbxw=="],
+
"styled-jsx": ["styled-jsx@5.1.6", "", { "dependencies": { "client-only": "0.0.1" }, "peerDependencies": { "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0 || ^19.0.0-0" } }, "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA=="],
"supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
@@ -653,6 +970,12 @@
"synckit": ["synckit@0.11.12", "", { "dependencies": { "@pkgr/core": "^0.2.9" } }, "sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ=="],
+ "tar-fs": ["tar-fs@2.1.4", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ=="],
+
+ "tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="],
+
+ "teeny-request": ["teeny-request@9.0.0", "", { "dependencies": { "http-proxy-agent": "^5.0.0", "https-proxy-agent": "^5.0.0", "node-fetch": "^2.6.9", "stream-events": "^1.0.5", "uuid": "^9.0.0" } }, "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g=="],
+
"tinybench": ["tinybench@2.9.0", "", {}, "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg=="],
"tinyexec": ["tinyexec@1.0.2", "", {}, "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg=="],
@@ -663,24 +986,40 @@
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
+ "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
+
"ts-api-utils": ["ts-api-utils@2.4.0", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
+ "tunnel-agent": ["tunnel-agent@0.6.0", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w=="],
+
"type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
- "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
+ "undici-types": ["undici-types@6.23.0", "", {}, "sha512-HN7GeXgBUs1StmY/vf9hIH11LrNI5SfqmFVtxKyp9Dhuf1P1cDSRlS+H1NJDaGOWzlI08q+NmiHgu11Vx6QnhA=="],
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
"use-sync-external-store": ["use-sync-external-store@1.6.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w=="],
+ "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
+
+ "uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="],
+
"vite": ["vite@7.3.1", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA=="],
"vitest": ["vitest@4.0.17", "", { "dependencies": { "@vitest/expect": "4.0.17", "@vitest/mocker": "4.0.17", "@vitest/pretty-format": "4.0.17", "@vitest/runner": "4.0.17", "@vitest/snapshot": "4.0.17", "@vitest/spy": "4.0.17", "@vitest/utils": "4.0.17", "es-module-lexer": "^1.7.0", "expect-type": "^1.2.2", "magic-string": "^0.30.21", "obug": "^2.1.1", "pathe": "^2.0.3", "picomatch": "^4.0.3", "std-env": "^3.10.0", "tinybench": "^2.9.0", "tinyexec": "^1.0.2", "tinyglobby": "^0.2.15", "tinyrainbow": "^3.0.3", "vite": "^6.0.0 || ^7.0.0", "why-is-node-running": "^2.3.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@opentelemetry/api": "^1.9.0", "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", "@vitest/browser-playwright": "4.0.17", "@vitest/browser-preview": "4.0.17", "@vitest/browser-webdriverio": "4.0.17", "@vitest/ui": "4.0.17", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@opentelemetry/api", "@types/node", "@vitest/browser-playwright", "@vitest/browser-preview", "@vitest/browser-webdriverio", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-FQMeF0DJdWY0iOnbv466n/0BudNdKj1l5jYgl5JVTwjSsZSlqyXFt/9+1sEyhR6CLowbZpV7O1sCHrzBhucKKg=="],
+ "webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
+
+ "websocket-driver": ["websocket-driver@0.7.4", "", { "dependencies": { "http-parser-js": ">=0.5.1", "safe-buffer": ">=5.1.0", "websocket-extensions": ">=0.1.1" } }, "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg=="],
+
+ "websocket-extensions": ["websocket-extensions@0.1.4", "", {}, "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg=="],
+
+ "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
+
"which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
"why-is-node-running": ["why-is-node-running@2.3.0", "", { "dependencies": { "siginfo": "^2.0.0", "stackback": "0.0.2" }, "bin": { "why-is-node-running": "cli.js" } }, "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w=="],
@@ -689,10 +1028,20 @@
"wrap-ansi": ["wrap-ansi@9.0.2", "", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww=="],
+ "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
+
"xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="],
+ "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],
+
+ "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="],
+
"yaml": ["yaml@2.8.2", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A=="],
+ "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="],
+
+ "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
+
"yocto-queue": ["yocto-queue@1.2.2", "", {}, "sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ=="],
"zod": ["zod@4.3.5", "", {}, "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g=="],
@@ -709,24 +1058,78 @@
"@eslint/eslintrc/ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
+ "@google-cloud/storage/p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="],
+
+ "@google-cloud/storage/uuid": ["uuid@8.3.2", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="],
+
+ "@grpc/grpc-js/@grpc/proto-loader": ["@grpc/proto-loader@0.8.0", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.5.3", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ=="],
+
+ "@types/node/undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
+
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
+ "auth0/jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="],
+
+ "cliui/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
+
+ "cliui/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
+
+ "cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
+
"eslint/ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
+ "firebase-admin/@types/node": ["@types/node@22.19.11", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w=="],
+
+ "firebase-admin/uuid": ["uuid@11.1.0", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A=="],
+
+ "form-data/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
+
+ "http-proxy-agent/agent-base": ["agent-base@6.0.2", "", { "dependencies": { "debug": "4" } }, "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ=="],
+
+ "jwks-rsa/jose": ["jose@4.15.9", "", {}, "sha512-1vUQX+IdDMVPj4k8kOxgUqlcK518yluMuGZwqlr44FS1ppZB/5GWh4rZG89erpOBOJjU/OBsnCVFfapsRz6nEA=="],
+
"micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
"p-locate/p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="],
+ "rc/strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="],
+
"slice-ansi/ansi-styles": ["ansi-styles@6.2.3", "", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="],
+ "teeny-request/https-proxy-agent": ["https-proxy-agent@5.0.1", "", { "dependencies": { "agent-base": "6", "debug": "4" } }, "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA=="],
+
"wrap-ansi/ansi-styles": ["ansi-styles@6.2.3", "", {}, "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="],
"wrap-ansi/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="],
+ "yargs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
+
+ "@google-cloud/storage/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
+
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="],
+ "cliui/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
+
+ "cliui/string-width/is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],
+
+ "cliui/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
+
+ "firebase-admin/@types/node/undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
+
+ "form-data/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
+
"p-locate/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
+
+ "teeny-request/https-proxy-agent/agent-base": ["agent-base@6.0.2", "", { "dependencies": { "debug": "4" } }, "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ=="],
+
+ "yargs/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
+
+ "yargs/string-width/is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],
+
+ "yargs/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
+
+ "yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
}
}
diff --git a/package.json b/package.json
index c677105..a1f8f64 100644
--- a/package.json
+++ b/package.json
@@ -10,7 +10,13 @@
"clean-logs": "bun ./src/clean-logs/index.ts",
"convert-logs": "bun ./src/convert-logs/index.ts",
"delete": "bun ./src/delete/index.ts",
- "export:supabase": "bun ./src/export/index.ts",
+ "export": "bun ./src/export/index.ts",
+ "export:betterauth": "bun ./src/export/index.ts -- --platform betterauth",
+ "export:clerk": "bun ./src/export/index.ts -- --platform clerk",
+ "export:auth0": "bun ./src/export/index.ts -- --platform auth0",
+ "export:authjs": "bun ./src/export/index.ts -- --platform authjs",
+ "export:firebase": "bun ./src/export/index.ts -- --platform firebase",
+ "export:supabase": "bun ./src/export/index.ts -- --platform supabase",
"format": "prettier . --write",
"format:test": "prettier . --check",
"lint": "eslint .",
@@ -36,11 +42,16 @@
"jose": "^6.1.3",
"mime-types": "^3.0.2",
"p-limit": "^7.2.0",
+ "auth0": "^4.14.0",
+ "better-sqlite3": "^11.9.1",
+ "firebase-admin": "^13.4.0",
+ "mysql2": "^3.14.1",
"pg": "^8.18.0",
"picocolors": "^1.1.1",
"zod": "^4.3.5"
},
"devDependencies": {
+ "@types/better-sqlite3": "^7.6.13",
"@types/bun": "^1.3.6",
"@types/mime-types": "^3.0.1",
"@types/pg": "^8.16.0",
diff --git a/prompts/clerk-migration-prompt.md b/prompts/clerk-migration-prompt.md
new file mode 100644
index 0000000..9cee754
--- /dev/null
+++ b/prompts/clerk-migration-prompt.md
@@ -0,0 +1,164 @@
+# AI Prompt for Clerk Dev ↔ Production Migrations
+
+Use this prompt with an AI assistant to migrate users between Clerk instances (e.g., development → production).
+
+---
+
+## Prompt Template
+
+Copy and paste the following prompt:
+
+````
+I want to migrate users between Clerk instances (e.g., from my development instance to production). Please help me export from the source instance and import into the destination instance.
+
+## Instructions
+
+Follow these steps EXACTLY in order. Do NOT skip any steps.
+
+### Step 1: Verify Environment
+
+1. Check if dependencies are installed. If not, use `bun install` to install them.
+2. Check if a `.env` file exists with `CLERK_SECRET_KEY`.
+ - If missing, IMMEDIATELY ask for the key (Clerk Dashboard → API Keys → Secret keys, or https://dashboard.clerk.com/~/api-keys)
+ - Create/update the `.env` file with the provided key
+ - Do NOT proceed until the key is configured
+
+### Step 2: Detect Instance Type
+
+Use the Clerk API to determine the environment type of the current key:
+
+```bash
+curl -s https://api.clerk.com/v1/instance \
+ --header "Authorization: Bearer $CLERK_SECRET_KEY"
+```
+
+Read `environment_type` from the response — it will be `"development"` or `"production"`.
+
+Tell the user which instance type was detected.
+
+### Step 3: Ensure the Source Key Is Active
+
+The **source** instance is the one you are exporting users FROM.
+
+- **If the detected instance is `development`** (the typical case — exporting dev → prod):
+ The source key is already active. Proceed to Step 4.
+
+- **If the detected instance is `production`** (the user wants to export from production, or provided a production key first):
+ Ask the user for their **development** (or other source) secret key. Then:
+ 1. Update `.env`: comment out the current key with a label, and set the new key:
+ ```
+ # Production CLERK_SECRET_KEY (commented out for export step)
+ # CLERK_SECRET_KEY=sk_live_...
+ # Development CLERK_SECRET_KEY (active for export)
+ CLERK_SECRET_KEY=sk_test_...
+ ```
+ 2. Re-run the instance detection from Step 2 to confirm the new key is for the expected environment.
+ 3. Do NOT proceed until confirmed.
+
+### Step 4: Export Users from the Source Instance
+
+Follow the instructions in `prompts/export-prompt.md` to export users, selecting **Clerk** as the platform.
+
+The export command is:
+```bash
+bun export:clerk
+```
+
+This will save users to `exports/clerk-export.json`.
+
+After the export completes, report the user count and field coverage.
+
+### Step 5: Switch to the Destination Key
+
+The **destination** instance is the one you are importing users INTO.
+
+1. Check `.env` for a commented-out key from Step 3 that matches the destination environment.
+ - If found, uncomment it and comment out the current (source) key. Add labels:
+ ```
+ # Development CLERK_SECRET_KEY (commented out after export)
+ # CLERK_SECRET_KEY=sk_test_...
+ # Production CLERK_SECRET_KEY (active for import)
+ CLERK_SECRET_KEY=sk_live_...
+ ```
+ - If NOT found, ask the user for the destination secret key and update `.env`.
+
+2. Re-run the instance detection from Step 2 to confirm the key matches the destination environment.
+3. Do NOT proceed until confirmed.
+
+### Step 6: Import Users into the Destination Instance
+
+Follow the instructions in `prompts/migration-prompt.md` to import users, using:
+- **Transformer**: `clerk`
+- **File**: `exports/clerk-export.json` (or whatever path was produced in Step 4)
+
+The migration command is:
+```bash
+bun migrate -y --transformer clerk --file exports/clerk-export.json
+```
+
+### Step 7: Post-Migration
+
+After migration completes:
+1. Report the number of users successfully migrated
+2. Report any failures or validation errors from the logs
+3. Remind the user:
+ - Verify users in the destination Clerk Dashboard
+ - Passwords, TOTP secrets, and backup codes are NOT available from the Clerk export API — users with passwords will need to reset them
+ - Run `bun delete` if you need to remove migrated users
+ - Check `./logs/` for detailed logs, or run `bun convert-logs` for easier reading
+
+## Important Notes
+
+- **Password limitation**: The Clerk API does not expose password hashes, TOTP secrets, or backup codes. Users who signed in with a password on the source instance will need to use "Forgot password" or another recovery flow on the destination instance.
+- **Rate limits**: Development instances allow ~10 req/s, while production allows ~100 req/s. The tool handles rate limiting automatically.
+- **External IDs**: Migrated users get an `externalId` set to their original user ID, which is how `bun delete` identifies them.
+
+````
+
+---
+
+## Typical Flow Summary
+
+```
+1. Verify deps + CLERK_SECRET_KEY
+2. Detect instance type (dev or prod)
+3. Ensure source key is active
+4. Export users (bun export:clerk) ← see prompts/export-prompt.md
+5. Switch .env to destination key
+6. Import users (bun migrate) ← see prompts/migration-prompt.md
+7. Verify migration results
+```
+
+---
+
+## Example Conversation
+
+### Dev → Production (typical)
+
+**User:** Migrate my users from development to production
+
+**AI checks .env, detects development instance:**
+
+> Dependencies are installed. I found `CLERK_SECRET_KEY` in your `.env` file.
+>
+> I checked the instance type and it's a **development** instance. I'll export users from here, then switch to your production key to import.
+>
+> Starting the Clerk export...
+
+**AI runs export, gets 150 users, then asks:**
+
+> Exported 150 users to `exports/clerk-export.json`.
+>
+> Now I need your **production** secret key to import the users. You can find it in the Clerk Dashboard → API Keys → Secret keys (make sure you're viewing the production instance): https://dashboard.clerk.com/~/api-keys
+
+**User:** sk_live_abc123...
+
+**AI updates .env, confirms production, runs migration:**
+
+> Confirmed — this is a **production** instance. I've updated `.env` (your dev key is commented out and labeled).
+>
+> Running the migration...
+>
+> Migration complete! 148 users imported successfully, 2 failed (see logs).
+>
+> Note: Passwords are not available from the Clerk export API. Users who signed in with a password will need to use "Forgot password" on the production instance.
diff --git a/prompts/export-prompt.md b/prompts/export-prompt.md
new file mode 100644
index 0000000..f0349b7
--- /dev/null
+++ b/prompts/export-prompt.md
@@ -0,0 +1,254 @@
+# AI Prompt for Exporting Users
+
+Use this prompt with an AI assistant to export users from your current authentication platform to a JSON file compatible with the migration tool.
+
+---
+
+## Prompt Template
+
+Copy and paste the following prompt, replacing `[PLATFORM]` with your source platform (or leave blank for auto-detection):
+
+````
+I want to export users from my authentication platform so I can migrate them to Clerk. Please help me run the export.
+
+Platform: [PLATFORM or "not sure"]
+
+## Instructions
+
+Follow these steps EXACTLY in order. Do NOT skip any steps.
+
+### Step 1: Verify Environment
+
+Before proceeding:
+1. Check if dependencies are installed. If not, use `bun install` to install them.
+2. Identify the source platform. If the user specified a platform, use it. Otherwise, ask which platform they're exporting from.
+
+### Step 2: Determine Platform and Requirements
+
+Based on the platform, verify the required configuration:
+
+| Platform | Required Configuration |
+|----------|----------------------|
+| **Auth0** | Auth0 domain, client ID, client secret (`AUTH0_DOMAIN`, `AUTH0_CLIENT_ID`, `AUTH0_CLIENT_SECRET` in `.env` or `--domain`, `--client-id`, `--client-secret` flags) |
+| **AuthJS** | Database connection string (`AUTHJS_DB_URL` in `.env` or `--db-url` flag). Supports PostgreSQL, MySQL, and SQLite. |
+| **Better Auth** | Database connection string (`BETTER_AUTH_DB_URL` in `.env` or `--db-url` flag). Supports PostgreSQL, MySQL, and SQLite. |
+| **Clerk** | `CLERK_SECRET_KEY` in `.env` (Clerk Dashboard → API Keys → Secret keys, or https://dashboard.clerk.com/~/api-keys) |
+| **Firebase** | Firebase service account JSON key file (`GOOGLE_APPLICATION_CREDENTIALS` in `.env` or `--service-account` flag) |
+| **Supabase** | PostgreSQL connection string (`SUPABASE_DB_URL` in `.env` or `--db-url` flag) |
+
+If the required configuration is missing:
+- For database platforms: ask for the connection string and suggest setting the environment variable
+- For Clerk: ask for the secret key and create/update `.env`
+
+**DO NOT** proceed until the required configuration is available.
+
+### Step 3: Run the Export
+
+Run the appropriate export command:
+
+```bash
+# Auth0
+bun export:auth0
+
+# AuthJS
+bun export:authjs
+
+# Better Auth
+bun export:betterauth
+
+# Clerk
+bun export:clerk
+
+# Firebase
+bun export:firebase
+
+# Supabase
+bun export:supabase
+
+# Or use the interactive picker
+bun export
+```
+
+For database exports, you can pass the connection string directly:
+```bash
+bun export:authjs -- --db-url postgresql://user:password@host:5432/database
+bun export:betterauth -- --db-url postgresql://user:password@host:5432/database
+bun export:supabase -- --db-url postgresql://postgres:password@db.xxx.supabase.co:5432/postgres
+```
+
+For Auth0, you can pass credentials directly:
+```bash
+bun export:auth0 -- --domain my-tenant.us.auth0.com --client-id xxx --client-secret xxx
+```
+
+For Firebase, you can pass the service account path:
+```bash
+bun export:firebase -- --service-account /path/to/service-account.json
+```
+
+### Step 4: Verify Output and Explain Field Coverage
+
+After the export completes:
+1. Report the number of users exported
+2. Explain the field coverage summary:
+ - Green ● = all users have this field
+ - Yellow ○ = some users have this field
+ - Dim ○ = no users have this field
+3. Note any fields with low coverage that may need attention
+
+### Step 5: Guide to Next Step
+
+Tell the user how to use the exported file with the migration tool:
+```bash
+bun migrate
+```
+
+Then select the appropriate transformer and point to the exported file in the `exports/` directory.
+
+If `CLERK_SECRET_KEY` is not yet configured (for database-only exports), remind the user to set it up before running the migration.
+````
+
+---
+
+## Platform Reference
+
+### Auth0
+
+- **Environment variables**: `AUTH0_DOMAIN`, `AUTH0_CLIENT_ID`, `AUTH0_CLIENT_SECRET`
+- **CLI flags**: `--domain`, `--client-id`, `--client-secret`
+- **Requires**: Auth0 Management API access (Machine-to-Machine app with `read:users` scope)
+- **API used**: `ManagementClient.users.getAll` with pagination (100 users per page)
+- **Note**: Password hashes are NOT available from the Management API. Contact Auth0 support for a password hash export.
+- **Output**: `exports/auth0-export.json`
+
+### AuthJS
+
+- **Environment variable**: `AUTHJS_DB_URL`
+- **CLI flag**: `--db-url`
+- **Requires**: Database access (PostgreSQL, MySQL, or SQLite)
+- **Tables read**: `"User"`, `"Account"` (standard AuthJS Prisma adapter schema)
+- **Table casing**: Tries PascalCase first, falls back to lowercase on failure
+- **Output**: `exports/authjs-export.json`
+
+### Better Auth
+
+- **Environment variable**: `BETTER_AUTH_DB_URL`
+- **CLI flag**: `--db-url`
+- **Requires**: Database access (PostgreSQL, MySQL, or SQLite)
+- **Tables read**: `"user"`, `"account"` (joined on `providerId = 'credential'`)
+- **Plugin detection**: Automatically detects username, phone number, admin, and two-factor plugins
+- **Output**: `exports/betterauth-export.json`
+
+### Clerk
+
+- **Environment variable**: `CLERK_SECRET_KEY`
+- **Requires**: Clerk API access (secret key)
+- **API used**: `getUserList` with pagination (500 users per page)
+- **Note**: Passwords, TOTP secrets, and backup codes are NOT available from the API
+- **Output**: `exports/clerk-export.json`
+
+### Firebase
+
+- **Environment variable**: `GOOGLE_APPLICATION_CREDENTIALS`
+- **CLI flag**: `--service-account`
+- **Requires**: Firebase Admin SDK access (service account JSON key file)
+- **API used**: `admin.auth().listUsers` with pagination (1000 users per page)
+- **Note**: Password hashes and salts are included when available. Use a project-level service account.
+- **Output**: `exports/firebase-export.json`
+
+### Supabase
+
+- **Environment variable**: `SUPABASE_DB_URL`
+- **CLI flag**: `--db-url`
+- **Requires**: PostgreSQL database access (direct or pooler connection)
+- **Table read**: `auth.users`
+- **Note**: Direct connections require the IPv4 add-on; pooler connections work without it
+- **Output**: `exports/supabase-export.json`
+
+---
+
+## Example Conversations
+
+### Example 1: Successful Supabase Export
+
+**User:** I want to export my Supabase users
+
+**AI checks environment:**
+
+> I'll help you export users from Supabase. I need a PostgreSQL connection string to connect to your Supabase database.
+>
+> You can find this in the Supabase Dashboard by clicking the **Connect** button:
+>
+> - **Direct connection** (requires IPv4 add-on): `postgresql://postgres:[PASSWORD]@db.[REF].supabase.co:5432/postgres`
+> - **Pooler connection** (works without IPv4): `postgres://postgres.[REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:6543/postgres`
+>
+> Please provide your connection string, or set `SUPABASE_DB_URL` in your `.env` file.
+
+**User:** postgresql://postgres:mypassword@db.abc123.supabase.co:5432/postgres
+
+**AI runs export:**
+
+> Running the export...
+>
+> ```bash
+> bun export:supabase -- --db-url "postgresql://postgres:mypassword@db.abc123.supabase.co:5432/postgres"
+> ```
+>
+> Export complete! Found 1,247 users:
+>
+> - ● 1247/1247 have email
+> - ● 1247/1247 email confirmed
+> - ● 1247/1247 have password hash
+> - ○ 89/1247 have phone
+> - ○ 1102/1247 have first name
+> - ○ 987/1247 have last name
+>
+> The file was saved to `exports/supabase-export.json`.
+>
+> **Next step:** Run `bun migrate` and select "Supabase" with the file `exports/supabase-export.json`. Make sure `CLERK_SECRET_KEY` is set in your `.env` file first.
+
+---
+
+### Example 2: Missing Environment Variable
+
+**User:** Export my Clerk users
+
+**AI checks environment:**
+
+> To export from Clerk, I need your `CLERK_SECRET_KEY`. I don't see it in the `.env` file.
+>
+> You can find it in the [Clerk Dashboard](https://dashboard.clerk.com/) → API Keys → Secret keys.
+>
+> Please provide your secret key.
+
+**User:** sk_test_abc123...
+
+**AI creates .env and runs export:**
+
+> I've added `CLERK_SECRET_KEY` to your `.env` file. Running the export now...
+
+---
+
+## Post-Export
+
+After exporting, use the file with the migration tool:
+
+```bash
+# Start migration (interactive)
+bun migrate
+
+# Or non-interactive
+bun migrate -y --transformer supabase --file exports/supabase-export.json
+```
+
+### Troubleshooting
+
+**Connection refused / ETIMEDOUT**: For Supabase, try using a pooler connection string instead of a direct connection. Direct connections require the IPv4 add-on.
+
+**Permission denied**: Ensure you're connecting with the `postgres` role, not an application-level role.
+
+**Table does not exist**: For Better Auth, ensure the database has been migrated (`npx @better-auth/cli migrate`). For AuthJS, ensure Prisma migrations have run. For Supabase, ensure Auth is enabled in the Dashboard.
+
+**Auth0 Unauthorized (401/403)**: Verify your Machine-to-Machine application has the `read:users` scope enabled in the Auth0 Dashboard.
+
+**Firebase INVALID_CREDENTIAL**: Ensure your service account JSON file is valid and for the correct project. Download a fresh key from the Firebase Console → Project Settings → Service Accounts.
diff --git a/prompts/migration-prompt.md b/prompts/migration-prompt.md
index 77c3a2a..cf5ad33 100644
--- a/prompts/migration-prompt.md
+++ b/prompts/migration-prompt.md
@@ -42,6 +42,7 @@ Read a sample of the file to understand its structure. Look for signature fields
| **Auth0** | `user_id` (format: "provider\|id"), `email_verified` (boolean), `phone_number`, `phone_verified`, `user_metadata`, `app_metadata`, `given_name`, `family_name` |
| **Firebase** | `localId`, `passwordHash`, `passwordSalt`, `displayName`, `phoneNumber`, `disabled` |
| **Clerk** | `primary_email_address`, `verified_email_addresses`, `password_digest`, `password_hasher`, `primary_phone_number` |
+| **Better Auth** | `user_id` (UUID), `email_verified` (boolean), `password_hash` (bcrypt `$2` prefix), `phone_number`, `phone_number_verified`, `display_username` |
| **AuthJS** | `email_verified`, `name`, `id`, `email` (minimal - may need customization) |
### Step 3A: If a Transformer Matches
@@ -153,6 +154,23 @@ unsafe_metadata → unsafeMetadata
private_metadata → privateMetadata
```
+### Better Auth
+
+```
+user_id → userId
+email → email (routed by email_verified)
+password_hash → password
+username → username
+phone_number → phone (routed by phone_number_verified)
+name → firstName + lastName (split)
+created_at → createdAt
+updated_at → updatedAt
+```
+
+Default: `passwordHasher: "bcrypt"`
+
+**Note:** Better Auth plugin fields (username, phone, admin, two-factor) are automatically detected during export and included when present.
+
### AuthJS (Next-Auth)
```
diff --git a/prompts/transformer-prompt.md b/prompts/transformer-prompt.md
index ad4c8d0..feaf3ca 100644
--- a/prompts/transformer-prompt.md
+++ b/prompts/transformer-prompt.md
@@ -17,7 +17,7 @@ Before proceeding, check if dependencies are installed. If not:
1. Use `bun install` to install dependencies.
Before generating the transformer, check if a `.env` file exists with `CLERK_SECRET_KEY`. If not:
-1. Ask the user to provide their CLERK_SECRET_KEY (found in Clerk Dashboard → API Keys → Secret keys)
+1. Ask the user to provide their CLERK_SECRET_KEY (found in Clerk Dashboard → API Keys → Secret keys, or https://dashboard.clerk.com/~/api-keys)
1. Create the `.env` file with the provided key
1. Continue with the transformer generation without stopping
diff --git a/samples/betterauth.json b/samples/betterauth.json
new file mode 100644
index 0000000..6b6aff9
--- /dev/null
+++ b/samples/betterauth.json
@@ -0,0 +1,112 @@
+[
+ {
+ "user_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
+ "email": "james.smith@gmail.com",
+ "email_verified": true,
+ "name": "James Smith",
+ "password_hash": "$2a$10$N9qo8uLOickgx2ZMRZoMyeIjZAgcfl7p92ldGxad68LJZdL17lhWy",
+ "created_at": "2025-02-09T16:34:22.611Z",
+ "updated_at": "2025-09-14T20:29:42.043Z"
+ },
+ {
+ "user_id": "b2c3d4e5-f6a7-8901-bcde-f12345678901",
+ "email": "emma.johnson@yahoo.com",
+ "email_verified": true,
+ "name": "Emma Johnson",
+ "password_hash": "$2a$10$K7L8M9N0O1P2Q3R4S5T6U7V8W9X0Y1Z2A3B4C5D6E7F8G9H0I1J2",
+ "username": "emmaj",
+ "phone_number": "+14155551234",
+ "phone_number_verified": true,
+ "created_at": "2025-03-15T10:22:38.831Z",
+ "updated_at": "2025-08-20T14:30:00.000Z"
+ },
+ {
+ "user_id": "c3d4e5f6-a7b8-9012-cdef-123456789012",
+ "email": "liam.williams@outlook.com",
+ "email_verified": false,
+ "name": "Liam Williams",
+ "password_hash": null,
+ "created_at": "2025-04-10T21:22:55.401Z",
+ "updated_at": "2025-10-13T00:07:47.153Z"
+ },
+ {
+ "user_id": "d4e5f6a7-b8c9-0123-defa-234567890123",
+ "email": "olivia.brown@hotmail.com",
+ "email_verified": true,
+ "name": "Olivia Brown",
+ "password_hash": "$2a$10$A1B2C3D4E5F6G7H8I9J0K1L2M3N4O5P6Q7R8S9T0U1V2W3X4Y5Z6",
+ "username": "oliviab",
+ "created_at": "2025-05-14T01:30:14.070Z",
+ "updated_at": "2025-08-31T12:56:44.123Z"
+ },
+ {
+ "user_id": "e5f6a7b8-c9d0-1234-efab-345678901234",
+ "email": "noah.jones@proton.me",
+ "email_verified": true,
+ "name": "Noah",
+ "password_hash": "$2a$10$Q1W2E3R4T5Y6U7I8O9P0A1S2D3F4G5H6J7K8L9Z0X1C2V3B4N5M6",
+ "phone_number": "+13784711532",
+ "phone_number_verified": false,
+ "created_at": "2025-06-27T18:37:43.579Z",
+ "updated_at": "2025-10-10T17:22:51.902Z"
+ },
+ {
+ "user_id": "f6a7b8c9-d0e1-2345-fabc-456789012345",
+ "email": "ava.garcia@icloud.com",
+ "email_verified": true,
+ "name": null,
+ "password_hash": "$2a$10$R2T3Y4U5I6O7P8A9S0D1F2G3H4J5K6L7Z8X9C0V1B2N3M4Q5W6E7",
+ "banned": true,
+ "ban_reason": "Spam",
+ "ban_expires": "2026-12-31T23:59:59.000Z",
+ "role": "user",
+ "created_at": "2025-07-10T00:36:12.411Z",
+ "updated_at": "2025-12-01T02:40:50.743Z"
+ },
+ {
+ "user_id": "a7b8c9d0-e1f2-3456-abcd-567890123456",
+ "email": "william.miller@company.io",
+ "email_verified": true,
+ "name": "William James Miller",
+ "password_hash": "$2a$10$X1Y2Z3A4B5C6D7E8F9G0H1I2J3K4L5M6N7O8P9Q0R1S2T3U4V5W6",
+ "username": "willm",
+ "phone_number": "+19863500676",
+ "phone_number_verified": true,
+ "role": "admin",
+ "two_factor_enabled": true,
+ "created_at": "2025-08-04T06:45:01.073Z",
+ "updated_at": "2025-12-25T02:15:55.651Z"
+ },
+ {
+ "user_id": "b8c9d0e1-f2a3-4567-bcde-678901234567",
+ "email": "sophia.davis@startup.co",
+ "email_verified": false,
+ "name": "Sophia Davis",
+ "password_hash": null,
+ "username": "sophiad",
+ "display_username": "SophiaD",
+ "created_at": "2025-09-01T22:42:49.974Z",
+ "updated_at": "2025-09-30T21:35:29.082Z"
+ },
+ {
+ "user_id": "c9d0e1f2-a3b4-5678-cdef-789012345678",
+ "email": "oliver.rodriguez@gmail.com",
+ "email_verified": true,
+ "name": "Oliver Rodriguez",
+ "password_hash": "$2a$10$M1N2O3P4Q5R6S7T8U9V0W1X2Y3Z4A5B6C7D8E9F0G1H2I3J4K5L6",
+ "created_at": "2025-10-04T04:49:46.871Z",
+ "updated_at": "2025-11-25T20:19:24.261Z"
+ },
+ {
+ "user_id": "d0e1f2a3-b4c5-6789-defa-890123456789",
+ "email": "isabella.martinez@yahoo.com",
+ "email_verified": true,
+ "name": "Isabella Martinez",
+ "password_hash": "$2a$10$F1G2H3I4J5K6L7M8N9O0P1Q2R3S4T5U6V7W8X9Y0Z1A2B3C4D5E6",
+ "username": "isabellam",
+ "phone_number": "+15148469027",
+ "phone_number_verified": false,
+ "created_at": "2025-11-01T14:11:24.525Z",
+ "updated_at": "2025-12-15T23:42:14.332Z"
+ }
+]
diff --git a/src/delete/index.ts b/src/delete/index.ts
index 531d1af..32732c8 100644
--- a/src/delete/index.ts
+++ b/src/delete/index.ts
@@ -10,11 +10,12 @@ import {
getFileType,
getRetryDelay,
tryCatch,
-} from '../utils';
+} from '../lib';
+import { normalizeErrorMessage } from '../lib';
+import { loadSettings } from '../lib/settings';
import { env, MAX_RETRIES, RETRY_DELAY_MS } from '../envs-constants';
import { closeAllStreams, deleteErrorLogger, deleteLogger } from '../logger';
import * as fs from 'fs';
-import * as path from 'path';
import csvParser from 'csv-parser';
import pLimit from 'p-limit';
import type { SettingsResult } from '../types';
@@ -33,31 +34,15 @@ let failed = 0;
* @throws Exits the process if .settings file is not found or missing the file property
*/
export const readSettings = (): SettingsResult => {
- const settingsPath = path.join(process.cwd(), '.settings');
-
- if (!fs.existsSync(settingsPath)) {
- p.log.error(
- color.red(
- 'No migration has been performed yet. Unable to find .settings file with migration source.'
- )
- );
- process.exit(1);
- }
-
- const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf-8')) as {
- file?: string;
- key?: string;
- };
-
+ const settings = loadSettings();
if (!settings.file) {
p.log.error(
color.red(
- 'No migration source found in .settings file. Please perform a migration first.'
+ 'No migration has been performed yet. Unable to find .settings file with migration source.'
)
);
process.exit(1);
}
-
return { file: settings.file, key: settings.key };
};
@@ -291,35 +276,6 @@ export const findIntersection = (
// Track error messages and counts
const errorCounts = new Map();
-/**
- * Normalizes error messages by sorting field arrays to group similar errors
- *
- * Example: Converts both:
- * - ["first_name" "last_name"] data doesn't match...
- * - ["last_name" "first_name"] data doesn't match...
- * into: ["first_name" "last_name"] data doesn't match...
- *
- * @param errorMessage - The original error message
- * @returns The normalized error message with sorted field arrays
- */
-export function normalizeErrorMessage(errorMessage: string): string {
- // Match array-like patterns in error messages: ["field1" "field2"]
- const arrayPattern = /\[([^\]]+)\]/g;
-
- return errorMessage.replace(arrayPattern, (_match, fields: string) => {
- // Split by spaces and quotes, filter out empty strings
- const fieldNames = fields
- .split(/["'\s]+/)
- .filter((f: string) => f.trim().length > 0);
-
- // Sort field names alphabetically
- fieldNames.sort();
-
- // Reconstruct the array notation
- return `[${fieldNames.map((f: string) => `"${f}"`).join(' ')}]`;
- });
-}
-
/**
* Deletes a single user from Clerk with retry logic for rate limits
*
diff --git a/src/export/auth0.ts b/src/export/auth0.ts
new file mode 100644
index 0000000..58b197b
--- /dev/null
+++ b/src/export/auth0.ts
@@ -0,0 +1,327 @@
+/**
+ * Auth0 user export module
+ *
+ * Exports users from an Auth0 tenant via the Management API to a JSON file
+ * compatible with the migration script's Auth0 transformer.
+ *
+ * Note: The Auth0 Management API does not return password hashes. Contact
+ * Auth0 support to request a password hash export separately.
+ *
+ * Usage:
+ * bun run export:auth0
+ * bun run export:auth0 -- --domain my-tenant.us.auth0.com --client-id xxx --client-secret xxx
+ *
+ * Environment variables:
+ * AUTH0_DOMAIN - Auth0 tenant domain
+ * AUTH0_CLIENT_ID - M2M app client ID
+ * AUTH0_CLIENT_SECRET - M2M app client secret
+ *
+ * Priority: CLI flags > env vars > interactive prompt
+ */
+import 'dotenv/config';
+import { ManagementClient } from 'auth0';
+import * as p from '@clack/prompts';
+import color from 'picocolors';
+import {
+ displayFieldCoverage,
+ getDateTimeStamp,
+ writeExportOutput,
+} from '../lib';
+import { closeAllStreams, exportLogger } from '../logger';
+import type { BaseExportResult } from '../types';
+
+const PAGE_SIZE = 100;
+
+interface Auth0ExportResult extends BaseExportResult {
+ fieldCoverage: {
+ email: number;
+ username: number;
+ firstName: number;
+ lastName: number;
+ phone: number;
+ password: number;
+ };
+}
+
+/**
+ * Maps an Auth0 user object to the export format expected by the Auth0 transformer.
+ *
+ * Preserves the field names that the Auth0 transformer maps from.
+ *
+ * @param user - An Auth0 user object from the Management API
+ * @returns A record with Auth0 transformer-compatible field names
+ */
+export function mapAuth0UserToExport(
+  user: Record<string, unknown>
+): Record<string, unknown> {
+  const exported: Record<string, unknown> = {};
+
+ if (user.user_id) exported.user_id = user.user_id;
+ if (user.email) exported.email = user.email;
+ if (user.email_verified !== undefined)
+ exported.email_verified = user.email_verified;
+ if (user.username) exported.username = user.username;
+ if (user.given_name) exported.given_name = user.given_name;
+ if (user.family_name) exported.family_name = user.family_name;
+ if (user.phone_number) exported.phone_number = user.phone_number;
+ if (user.phone_verified !== undefined)
+ exported.phone_verified = user.phone_verified;
+ if (
+ user.user_metadata &&
+ Object.keys(user.user_metadata as object).length > 0
+ )
+ exported.user_metadata = user.user_metadata;
+ if (user.app_metadata && Object.keys(user.app_metadata as object).length > 0)
+ exported.app_metadata = user.app_metadata;
+ if (user.created_at) exported.created_at = user.created_at;
+
+ return exported;
+}
+
+/**
+ * Exports all users from an Auth0 tenant to a JSON file.
+ *
+ * Fetches users via the Management API with pagination (100 per page),
+ * maps them to the Auth0 transformer format, and writes to exports/.
+ *
+ * @param domain - Auth0 tenant domain (e.g., my-tenant.us.auth0.com)
+ * @param clientId - M2M application client ID
+ * @param clientSecret - M2M application client secret
+ * @param outputFile - Output file name (written inside exports/ directory)
+ * @returns Export result with user count, output path, and field coverage stats
+ */
+export async function exportAuth0Users(
+ domain: string,
+ clientId: string,
+ clientSecret: string,
+ outputFile: string
+): Promise<Auth0ExportResult> {
+ const management = new ManagementClient({
+ domain,
+ clientId,
+ clientSecret,
+ });
+
+ const dateTime = getDateTimeStamp();
+  const allUsers: Record<string, unknown>[] = [];
+
+ const coverage = {
+ email: 0,
+ username: 0,
+ firstName: 0,
+ lastName: 0,
+ phone: 0,
+ password: 0,
+ };
+
+ let page = 0;
+ let hasMore = true;
+
+ while (hasMore) {
+ const response = await management.users.getAll({
+ page,
+ per_page: PAGE_SIZE,
+ include_totals: true,
+ });
+
+ const users = response.data.users;
+
+ for (const user of users) {
+      const mapped = mapAuth0UserToExport(
+        user as unknown as Record<string, unknown>
+      );
+ allUsers.push(mapped);
+
+ if (mapped.email) coverage.email++;
+ if (mapped.username) coverage.username++;
+ if (mapped.given_name) coverage.firstName++;
+ if (mapped.family_name) coverage.lastName++;
+ if (mapped.phone_number) coverage.phone++;
+ // Password hashes are never available from the Management API
+ // coverage.password stays at 0
+
+ exportLogger(
+ {
+ userId:
+ typeof mapped.user_id === 'string'
+ ? mapped.user_id
+ : `row_${allUsers.length}`,
+ status: 'success',
+ },
+ dateTime
+ );
+ }
+
+ hasMore = users.length === PAGE_SIZE;
+ page++;
+ }
+
+ closeAllStreams();
+
+ const outputPath = writeExportOutput(allUsers, outputFile);
+
+ return {
+ userCount: allUsers.length,
+ outputPath,
+ fieldCoverage: coverage,
+ };
+}
+
+/**
+ * Displays the Auth0 export results as a field coverage report.
+ *
+ * @param result - Export result containing user count, output path, and per-field coverage stats
+ */
+export function displayAuth0ExportSummary(result: Auth0ExportResult): void {
+ const { userCount, outputPath, fieldCoverage } = result;
+
+ displayFieldCoverage(
+ [
+ { label: 'have email', count: fieldCoverage.email },
+ { label: 'have username', count: fieldCoverage.username },
+ { label: 'have first name', count: fieldCoverage.firstName },
+ { label: 'have last name', count: fieldCoverage.lastName },
+ { label: 'have phone', count: fieldCoverage.phone },
+ { label: 'have password hash', count: fieldCoverage.password },
+ ],
+ userCount,
+ outputPath
+ );
+
+ p.log.info(
+ color.dim(
+ 'Password hashes are not available from the Auth0 Management API.\n' +
+ 'Contact Auth0 support to request a password hash export.'
+ )
+ );
+}
+
+/**
+ * Parses Auth0-specific CLI flags from process arguments.
+ */
+function parseAuth0Args(): {
+ domain?: string;
+ clientId?: string;
+ clientSecret?: string;
+ output?: string;
+} {
+ const args = process.argv.slice(2);
+  const result: Record<string, string> = {};
+
+ for (let i = 0; i < args.length; i++) {
+ if (args[i] === '--domain' && args[i + 1]) {
+ result.domain = args[i + 1];
+ i++;
+ } else if (args[i] === '--client-id' && args[i + 1]) {
+ result.clientId = args[i + 1];
+ i++;
+ } else if (args[i] === '--client-secret' && args[i + 1]) {
+ result.clientSecret = args[i + 1];
+ i++;
+ } else if (args[i] === '--output' && args[i + 1]) {
+ result.output = args[i + 1];
+ i++;
+ }
+ }
+
+ return result;
+}
+
+/**
+ * CLI wrapper for the Auth0 export command.
+ *
+ * Resolves credentials from CLI flags, env vars, or interactive prompts,
+ * then exports users to a JSON file.
+ */
+export async function runAuth0Export(): Promise<void> {
+ p.intro(color.bgCyan(color.black('Auth0 User Export')));
+
+ const cliArgs = parseAuth0Args();
+  const env = process.env as Record<string, string | undefined>;
+
+ let domain = cliArgs.domain || env.AUTH0_DOMAIN;
+ let clientId = cliArgs.clientId || env.AUTH0_CLIENT_ID;
+ let clientSecret = cliArgs.clientSecret || env.AUTH0_CLIENT_SECRET;
+ const outputFile = cliArgs.output || 'auth0-export.json';
+
+ // Prompt for missing credentials
+ if (!domain) {
+ const input = await p.text({
+ message: 'Enter your Auth0 domain',
+ placeholder: 'my-tenant.us.auth0.com',
+ validate: (value) => {
+ if (!value || value.trim() === '') {
+ return 'Auth0 domain is required';
+ }
+ },
+ });
+ if (p.isCancel(input)) {
+ p.cancel('Export cancelled.');
+ process.exit(0);
+ }
+ domain = input;
+ }
+
+ if (!clientId) {
+ const input = await p.text({
+ message: 'Enter your Auth0 M2M application Client ID',
+ placeholder: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+ validate: (value) => {
+ if (!value || value.trim() === '') {
+ return 'Client ID is required';
+ }
+ },
+ });
+ if (p.isCancel(input)) {
+ p.cancel('Export cancelled.');
+ process.exit(0);
+ }
+ clientId = input;
+ }
+
+ if (!clientSecret) {
+ const input = await p.text({
+ message: 'Enter your Auth0 M2M application Client Secret',
+ placeholder:
+ 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
+ validate: (value) => {
+ if (!value || value.trim() === '') {
+ return 'Client Secret is required';
+ }
+ },
+ });
+ if (p.isCancel(input)) {
+ p.cancel('Export cancelled.');
+ process.exit(0);
+ }
+ clientSecret = input;
+ }
+
+ const spinner = p.spinner();
+ spinner.start('Fetching users from Auth0...');
+
+ try {
+ const result = await exportAuth0Users(
+ domain,
+ clientId,
+ clientSecret,
+ outputFile
+ );
+ spinner.stop(`Found ${result.userCount} users`);
+
+ displayAuth0ExportSummary(result);
+
+ p.log.info(
+ color.dim(
+ `Next step: run ${color.bold('bun run migrate')} and select "Auth0" with file "exports/${outputFile}"`
+ )
+ );
+
+ p.outro(color.green('Export complete!'));
+ } catch (err) {
+ spinner.stop('Export failed');
+ const message = err instanceof Error ? err.message : String(err);
+ p.log.error(color.red(message));
+ process.exit(1);
+ }
+}
diff --git a/src/export/authjs.ts b/src/export/authjs.ts
new file mode 100644
index 0000000..aba4fa0
--- /dev/null
+++ b/src/export/authjs.ts
@@ -0,0 +1,267 @@
+/**
+ * AuthJS (Next-Auth) user export module
+ *
+ * Connects to a database used by AuthJS (Prisma adapter) and exports users
+ * from the "User" and "Account" tables to a JSON file.
+ *
+ * Supports PostgreSQL, MySQL, and SQLite via the shared database abstraction.
+ *
+ * Core tables (Prisma adapter default names):
+ * - "User" (core user identity)
+ * - "Account" (OAuth providers — joined to detect provider usage)
+ *
+ * Usage:
+ * bun run export:authjs
+ * bun run export:authjs -- --db-url postgresql://... --output users.json
+ *
+ * Environment variables:
+ * AUTHJS_DB_URL - Database connection string (PostgreSQL, MySQL, or SQLite path)
+ *
+ * Priority: --db-url flag > AUTHJS_DB_URL env var > interactive prompt
+ */
+import * as p from '@clack/prompts';
+import color from 'picocolors';
+import { createDbClient, type DbClient } from '../lib/db';
+import {
+ displayFieldCoverage,
+ getDateTimeStamp,
+ getDbConnectionErrorHint,
+ isValidConnectionString,
+ resolveConnectionString,
+ writeExportOutput,
+} from '../lib';
+import { closeAllStreams, exportLogger } from '../logger';
+import type { BaseExportResult } from '../types';
+
+interface AuthJSExportResult extends BaseExportResult {
+ fieldCoverage: {
+ email: number;
+ emailVerified: number;
+ name: number;
+ };
+}
+
+interface AuthJSUserRow {
+ id: string;
+ name: string | null;
+ email: string | null;
+ email_verified: string | boolean | null;
+ [key: string]: unknown;
+}
+
+/**
+ * Builds the export SQL query with correct identifier quoting for the database type.
+ *
+ * @param dbType - The database type (postgres, mysql, sqlite)
+ * @param tableCasing - Whether to use PascalCase or lowercase table names
+ * @returns SQL SELECT query string
+ */
+function buildExportQuery(
+ dbType: 'postgres' | 'mysql' | 'sqlite',
+ tableCasing: 'pascal' | 'lower' = 'pascal'
+): string {
+ if (dbType === 'mysql') {
+ const user = tableCasing === 'pascal' ? '`User`' : '`user`';
+ return `SELECT u.\`id\`, u.\`name\`, u.\`email\`, u.\`emailVerified\` AS email_verified FROM ${user} u ORDER BY u.\`id\` ASC`;
+ }
+
+ // PostgreSQL and SQLite use double-quoted identifiers
+ const user = tableCasing === 'pascal' ? '"User"' : '"user"';
+ return `SELECT u."id", u."name", u."email", u."emailVerified" AS email_verified FROM ${user} u ORDER BY u."id" ASC`;
+}
+
+/**
+ * Exports users from an AuthJS database to a JSON file.
+ *
+ * Connects to the database, queries the User table with appropriate
+ * identifier quoting, and writes results to the exports/ directory.
+ *
+ * If the initial query fails with a "does not exist" error, retries
+ * with lowercase table names as a fallback.
+ *
+ * @param dbUrl - Database connection string (PostgreSQL, MySQL, or SQLite path)
+ * @param outputFile - Output file name (written inside exports/ directory)
+ * @returns Export result with user count and field coverage stats
+ */
+export async function exportAuthJSUsers(
+ dbUrl: string,
+ outputFile: string
+): Promise<AuthJSExportResult> {
+ let client: DbClient;
+
+ try {
+ client = await createDbClient(dbUrl);
+ } catch (err) {
+ const message = err instanceof Error ? err.message : String(err);
+ const hint = getDbConnectionErrorHint(message, 'authjs');
+ throw new Error(`Failed to connect to database: ${message}\n\n${hint}`);
+ }
+
+ try {
+ let rows: AuthJSUserRow[];
+
+ // Try PascalCase table names first (Prisma adapter default)
+ const query = buildExportQuery(client.dbType, 'pascal');
+
+ try {
+      ({ rows } = await client.query<AuthJSUserRow>(query));
+ } catch (err) {
+ const message = err instanceof Error ? err.message : String(err);
+
+ // If table not found, retry with lowercase
+ if (
+ message.includes('does not exist') ||
+ message.includes("doesn't exist") ||
+ message.includes('no such table')
+ ) {
+ const fallbackQuery = buildExportQuery(client.dbType, 'lower');
+ try {
+          ({ rows } = await client.query<AuthJSUserRow>(fallbackQuery));
+ } catch (retryErr) {
+ const retryMessage =
+ retryErr instanceof Error ? retryErr.message : String(retryErr);
+ throw new Error(
+ `Could not find AuthJS tables: ${retryMessage}\n\n` +
+ 'The AuthJS Prisma adapter creates "User" and "Account" tables.\n' +
+ 'Ensure AuthJS has been set up and the database has been migrated\n' +
+ '(npx prisma migrate deploy).'
+ );
+ }
+ } else if (message.includes('permission denied')) {
+ throw new Error(
+ `Could not read from AuthJS tables: ${message}\n\n` +
+ 'Ensure the database user has SELECT permission on the "User" table.'
+ );
+ } else {
+ throw err;
+ }
+ }
+
+ // Calculate field coverage and log each user
+ const dateTime = getDateTimeStamp();
+ const coverage = {
+ email: 0,
+ emailVerified: 0,
+ name: 0,
+ };
+
+ for (const row of rows) {
+ if (row.email) coverage.email++;
+ if (row.email_verified) coverage.emailVerified++;
+ if (row.name) coverage.name++;
+
+ exportLogger({ userId: row.id, status: 'success' }, dateTime);
+ }
+
+ closeAllStreams();
+
+ const outputPath = writeExportOutput(rows, outputFile);
+
+ return {
+ userCount: rows.length,
+ outputPath,
+ fieldCoverage: coverage,
+ };
+ } finally {
+ await client.end();
+ }
+}
+
+/**
+ * Displays the AuthJS export results as a field coverage report.
+ *
+ * @param result - Export result containing user count, output path, and per-field coverage stats
+ */
+export function displayAuthJSExportSummary(result: AuthJSExportResult): void {
+ const { userCount, outputPath, fieldCoverage } = result;
+
+ displayFieldCoverage(
+ [
+ { label: 'have email', count: fieldCoverage.email },
+ { label: 'email verified', count: fieldCoverage.emailVerified },
+ { label: 'have name', count: fieldCoverage.name },
+ ],
+ userCount,
+ outputPath
+ );
+}
+
+/**
+ * CLI wrapper for the AuthJS export command.
+ *
+ * Prompts for a connection string if not provided via --db-url flag or
+ * AUTHJS_DB_URL environment variable, then exports users to a JSON file.
+ */
+export async function runAuthJSExport(): Promise<void> {
+ p.intro(color.bgCyan(color.black('AuthJS User Export')));
+
+ const {
+ dbUrl: resolvedUrl,
+ outputFile,
+ warning,
+ } = resolveConnectionString(
+ process.argv.slice(2),
+    process.env as Record<string, string | undefined>,
+ {
+ envVarName: 'AUTHJS_DB_URL',
+ defaultOutputFile: 'authjs-export.json',
+ }
+ );
+
+ let dbUrl = resolvedUrl;
+
+ if (warning) {
+ p.log.warn(color.yellow(warning));
+ }
+
+ // Prompt for connection string if not resolved from flag or env
+ if (!dbUrl) {
+ p.note(
+ `AuthJS stores data in your application database.\n\n${color.bold('PostgreSQL')}:\n ${color.dim('postgresql://user:password@host:5432/database')}\n\n${color.bold('MySQL')}:\n ${color.dim('mysql://user:password@host:3306/database')}\n\n${color.bold('SQLite')}:\n ${color.dim('/path/to/database.sqlite')}`,
+ 'Connection String'
+ );
+
+ const input = await p.text({
+ message: 'Enter your database connection string',
+ placeholder: 'postgresql://user:password@host:5432/database',
+ validate: (value) => {
+ if (!value || value.trim() === '') {
+ return 'Connection string is required';
+ }
+ if (!isValidConnectionString(value)) {
+ return 'Must be a valid database connection string (postgresql://, mysql://, or a file path)';
+ }
+ },
+ });
+
+ if (p.isCancel(input)) {
+ p.cancel('Export cancelled.');
+ process.exit(0);
+ }
+
+ dbUrl = input;
+ }
+
+ const spinner = p.spinner();
+ spinner.start('Connecting to database...');
+
+ try {
+ const result = await exportAuthJSUsers(dbUrl, outputFile);
+ spinner.stop(`Found ${result.userCount} users`);
+
+ displayAuthJSExportSummary(result);
+
+ p.log.info(
+ color.dim(
+ `Next step: run ${color.bold('bun run migrate')} and select "AuthJS" with file "exports/${outputFile}"`
+ )
+ );
+
+ p.outro(color.green('Export complete!'));
+ } catch (err) {
+ spinner.stop('Export failed');
+ const message = err instanceof Error ? err.message : String(err);
+ p.log.error(color.red(message));
+ process.exit(1);
+ }
+}
diff --git a/src/export/betterauth.ts b/src/export/betterauth.ts
new file mode 100644
index 0000000..1df81a0
--- /dev/null
+++ b/src/export/betterauth.ts
@@ -0,0 +1,447 @@
+/**
+ * Better Auth user export module
+ *
+ * Connects to a database used by Better Auth and exports users from the
+ * "user" and "account" tables to a JSON file.
+ *
+ * Supports PostgreSQL, MySQL, and SQLite via the shared database abstraction.
+ *
+ * Dynamically detects installed Better Auth plugins (username, phone number,
+ * admin, two-factor) and includes those columns when present.
+ *
+ * Core tables:
+ * - "user" (core user identity)
+ * - "account" (hashed passwords, provider info — joined on providerId='credential')
+ *
+ * Usage:
+ * bun run export:betterauth
+ * bun run export:betterauth -- --db-url postgresql://... --output users.json
+ *
+ * Environment variables:
+ * BETTER_AUTH_DB_URL - Database connection string
+ *
+ * Priority: --db-url flag > BETTER_AUTH_DB_URL env var > interactive prompt
+ */
+import * as p from '@clack/prompts';
+import color from 'picocolors';
+import { createDbClient, type DbClient, type DbType } from '../lib/db';
+import {
+ displayFieldCoverage,
+ getDateTimeStamp,
+ getDbConnectionErrorHint,
+ isValidConnectionString,
+ resolveConnectionString,
+ writeExportOutput,
+} from '../lib';
+import { closeAllStreams, exportLogger } from '../logger';
+import type { BaseExportResult } from '../types';
+
+/**
+ * Plugin columns that may exist on the "user" table depending on installed plugins.
+ *
+ * - username plugin: username, displayUsername
+ * - phone number plugin: phoneNumber, phoneNumberVerified
+ * - admin plugin: role, banned, banReason, banExpires
+ * - two-factor plugin: twoFactorEnabled
+ */
+const PLUGIN_COLUMNS = [
+ 'username',
+ 'displayUsername',
+ 'phoneNumber',
+ 'phoneNumberVerified',
+ 'role',
+ 'banned',
+ 'banReason',
+ 'banExpires',
+ 'twoFactorEnabled',
+] as const;
+
+type PluginColumn = (typeof PLUGIN_COLUMNS)[number];
+
+/**
+ * Detects which Better Auth plugin columns exist on the "user" table.
+ *
+ * For PostgreSQL/MySQL, queries information_schema.columns.
+ * For SQLite, uses PRAGMA table_info.
+ *
+ * @param client - Connected DbClient
+ * @returns Set of detected plugin column names
+ */
+export async function detectPluginColumns(
+  client: DbClient
+): Promise<Set<PluginColumn>> {
+ if (client.dbType === 'sqlite') {
+ const result = await client.query<{ name: string }>(
+ `PRAGMA table_info("user")`
+ );
+ const columnNames = new Set(result.rows.map((r) => r.name));
+    const detected = new Set<PluginColumn>();
+ for (const col of PLUGIN_COLUMNS) {
+ if (columnNames.has(col)) detected.add(col);
+ }
+ return detected;
+ }
+
+ if (client.dbType === 'mysql') {
+ const result = await client.query<{ column_name: string }>(
+ `SELECT column_name
+ FROM information_schema.columns
+ WHERE table_name = 'user'
+ AND table_schema = DATABASE()
+ AND column_name IN (${PLUGIN_COLUMNS.map(() => '?').join(', ')})`,
+ [...PLUGIN_COLUMNS]
+ );
+ return new Set(result.rows.map((r) => r.column_name as PluginColumn));
+ }
+
+ // PostgreSQL
+ const result = await client.query<{ column_name: string }>(
+ `SELECT column_name
+ FROM information_schema.columns
+ WHERE table_name = 'user'
+ AND table_schema = current_schema()
+ AND column_name = ANY($1)`,
+ [PLUGIN_COLUMNS as unknown as string[]]
+ );
+
+ return new Set(result.rows.map((r) => r.column_name as PluginColumn));
+}
+
+/**
+ * Builds the export SQL query dynamically based on available plugin columns.
+ *
+ * Always includes core user fields and credential password from the account table.
+ * Adds plugin-specific columns only if they were detected by detectPluginColumns.
+ * Uses correct identifier quoting based on the database type.
+ *
+ * @param pluginColumns - Set of detected plugin column names
+ * @param dbType - Database type for correct identifier quoting
+ * @returns SQL SELECT query string
+ */
+export function buildExportQuery(
+  pluginColumns: Set<PluginColumn>,
+ dbType: DbType = 'postgres'
+): string {
+ // MySQL uses backticks, PostgreSQL/SQLite use double quotes
+ const q =
+ dbType === 'mysql' ? (s: string) => `\`${s}\`` : (s: string) => `"${s}"`;
+
+ const selects: string[] = [
+ // Core columns (always present)
+ `u.${q('id')} AS user_id`,
+ `u.${q('email')}`,
+ `u.${q('emailVerified')} AS email_verified`,
+ `u.${q('name')}`,
+ `u.${q('createdAt')} AS created_at`,
+ `u.${q('updatedAt')} AS updated_at`,
+ // Credential password from account table
+ `a.${q('password')} AS password_hash`,
+ ];
+
+ // Username plugin
+ if (pluginColumns.has('username')) selects.push(`u.${q('username')}`);
+ if (pluginColumns.has('displayUsername'))
+ selects.push(`u.${q('displayUsername')} AS display_username`);
+
+ // Phone number plugin
+ if (pluginColumns.has('phoneNumber'))
+ selects.push(`u.${q('phoneNumber')} AS phone_number`);
+ if (pluginColumns.has('phoneNumberVerified'))
+ selects.push(`u.${q('phoneNumberVerified')} AS phone_number_verified`);
+
+ // Admin plugin
+ if (pluginColumns.has('role')) selects.push(`u.${q('role')}`);
+ if (pluginColumns.has('banned')) selects.push(`u.${q('banned')}`);
+ if (pluginColumns.has('banReason'))
+ selects.push(`u.${q('banReason')} AS ban_reason`);
+ if (pluginColumns.has('banExpires'))
+ selects.push(`u.${q('banExpires')} AS ban_expires`);
+
+ // Two-factor plugin
+ if (pluginColumns.has('twoFactorEnabled'))
+ selects.push(`u.${q('twoFactorEnabled')} AS two_factor_enabled`);
+
+ return [
+ 'SELECT',
+ ` ${selects.join(',\n ')}`,
+ `FROM ${q('user')} u`,
+ `LEFT JOIN ${q('account')} a ON a.${q('userId')} = u.${q('id')} AND a.${q('providerId')} = 'credential'`,
+ `ORDER BY u.${q('createdAt')} ASC`,
+ ].join('\n');
+}
+
+/**
+ * Determines which Better Auth plugins are installed based on detected columns.
+ *
+ * @param pluginColumns - Set of detected plugin column names
+ * @returns Array of human-readable plugin names
+ */
+export function getDetectedPluginNames(
+  pluginColumns: Set<PluginColumn>
+): string[] {
+ const plugins: string[] = [];
+ if (pluginColumns.has('username') || pluginColumns.has('displayUsername'))
+ plugins.push('username');
+ if (
+ pluginColumns.has('phoneNumber') ||
+ pluginColumns.has('phoneNumberVerified')
+ )
+ plugins.push('phone number');
+ if (
+ pluginColumns.has('role') ||
+ pluginColumns.has('banned') ||
+ pluginColumns.has('banReason') ||
+ pluginColumns.has('banExpires')
+ )
+ plugins.push('admin');
+ if (pluginColumns.has('twoFactorEnabled')) plugins.push('two-factor');
+ return plugins;
+}
+
+interface BetterAuthExportResult extends BaseExportResult {
+ detectedPlugins: string[];
+ fieldCoverage: {
+ email: number;
+ emailVerified: number;
+ name: number;
+ password: number;
+ username: number;
+ phone: number;
+ };
+}
+
+interface BetterAuthUserRow {
+ user_id: string;
+ email: string | null;
+ email_verified: boolean | null;
+ name: string | null;
+ password_hash: string | null;
+ username?: string | null;
+ phone_number?: string | null;
+ [key: string]: unknown;
+}
+
+/**
+ * Exports users from a Better Auth database to a JSON file.
+ *
+ * Connects to the database, detects installed plugins, builds a dynamic
+ * query, and writes the results to the exports/ directory.
+ *
+ * Supports PostgreSQL, MySQL, and SQLite databases.
+ *
+ * @param dbUrl - Database connection string or file path
+ * @param outputFile - Output file name (written inside exports/ directory)
+ * @returns Export result with user count, detected plugins, and field coverage stats
+ */
+export async function exportBetterAuthUsers(
+ dbUrl: string,
+ outputFile: string
+): Promise<BetterAuthExportResult> {
+ let client: DbClient;
+
+ try {
+ client = await createDbClient(dbUrl);
+ } catch (err) {
+ const message = err instanceof Error ? err.message : String(err);
+ const hint = getDbConnectionErrorHint(message, 'betterauth');
+ throw new Error(`Failed to connect to database: ${message}\n\n${hint}`);
+ }
+
+ try {
+ // Detect which plugin columns exist
+    let pluginColumns: Set<PluginColumn>;
+ try {
+ pluginColumns = await detectPluginColumns(client);
+ } catch (err) {
+ const message = err instanceof Error ? err.message : String(err);
+ if (
+ message.includes('does not exist') ||
+ message.includes("doesn't exist") ||
+ message.includes('no such table') ||
+ message.includes('permission denied')
+ ) {
+ throw new Error(
+ `Could not read from "user" table: ${message}\n\n` +
+ 'The "user" table is created by Better Auth when the database is initialized.\n' +
+ 'Ensure Better Auth has been set up and the database has been migrated.\n' +
+ 'If you customized table names in your Better Auth config, update the\n' +
+ 'table references in this export script to match.'
+ );
+ }
+ throw err;
+ }
+
+ const detectedPlugins = getDetectedPluginNames(pluginColumns);
+
+ // Build and execute the dynamic query
+ const query = buildExportQuery(pluginColumns, client.dbType);
+
+ let rows: BetterAuthUserRow[];
+ try {
+      ({ rows } = await client.query<BetterAuthUserRow>(query));
+ } catch (err) {
+ const message = err instanceof Error ? err.message : String(err);
+ if (
+ message.includes('does not exist') ||
+ message.includes("doesn't exist") ||
+ message.includes('no such table') ||
+ message.includes('permission denied')
+ ) {
+ throw new Error(
+ `Could not query Better Auth tables: ${message}\n\n` +
+ 'Ensure the "user" and "account" tables exist and the database\n' +
+ 'user has SELECT permission on them.'
+ );
+ }
+ throw err;
+ }
+
+ // Calculate field coverage and log each user
+ const dateTime = getDateTimeStamp();
+ const coverage = {
+ email: 0,
+ emailVerified: 0,
+ name: 0,
+ password: 0,
+ username: 0,
+ phone: 0,
+ };
+
+ for (const row of rows) {
+ if (row.email) coverage.email++;
+ if (row.email_verified) coverage.emailVerified++;
+ if (row.name) coverage.name++;
+ if (row.password_hash) coverage.password++;
+ if (row.username) coverage.username++;
+ if (row.phone_number) coverage.phone++;
+
+ exportLogger({ userId: row.user_id, status: 'success' }, dateTime);
+ }
+
+ closeAllStreams();
+
+ const outputPath = writeExportOutput(rows, outputFile);
+
+ return {
+ userCount: rows.length,
+ outputPath,
+ detectedPlugins,
+ fieldCoverage: coverage,
+ };
+ } finally {
+ await client.end();
+ }
+}
+
+/**
+ * Displays the Better Auth export results as a field coverage report.
+ *
+ * Shows detected plugins and per-field coverage with colored icons.
+ *
+ * @param result - Export result containing user count, detected plugins, and per-field coverage stats
+ */
+export function displayBetterAuthExportSummary(
+ result: BetterAuthExportResult
+): void {
+ const { userCount, outputPath, detectedPlugins, fieldCoverage } = result;
+
+ // Show detected plugins
+ if (detectedPlugins.length > 0) {
+ p.log.info(
+ `Detected plugins: ${detectedPlugins.map((pl) => color.cyan(pl)).join(', ')}`
+ );
+ }
+
+ displayFieldCoverage(
+ [
+ { label: 'have email', count: fieldCoverage.email },
+ { label: 'email verified', count: fieldCoverage.emailVerified },
+ { label: 'have name', count: fieldCoverage.name },
+ { label: 'have password hash', count: fieldCoverage.password },
+ { label: 'have username', count: fieldCoverage.username },
+ { label: 'have phone', count: fieldCoverage.phone },
+ ],
+ userCount,
+ outputPath
+ );
+}
+
+/**
+ * CLI wrapper for the Better Auth export command
+ *
+ * Prompts for a connection string if not provided via --db-url flag or
+ * BETTER_AUTH_DB_URL environment variable, then exports users to a JSON file.
+ */
+export async function runBetterAuthExport(): Promise<void> {
+ p.intro(color.bgCyan(color.black('Better Auth User Export')));
+
+ const {
+ dbUrl: resolvedUrl,
+ outputFile,
+ warning,
+ } = resolveConnectionString(
+ process.argv.slice(2),
+    process.env as Record<string, string | undefined>,
+ {
+ envVarName: 'BETTER_AUTH_DB_URL',
+ defaultOutputFile: 'betterauth-export.json',
+ }
+ );
+
+ let dbUrl = resolvedUrl;
+
+ if (warning) {
+ p.log.warn(color.yellow(warning));
+ }
+
+ // Prompt for connection string if not resolved from flag or env
+ if (!dbUrl) {
+ p.note(
+ `Better Auth stores data in your application database.\n\n${color.bold('PostgreSQL')}:\n ${color.dim('postgresql://user:password@host:5432/database')}\n\n${color.bold('MySQL')}:\n ${color.dim('mysql://user:password@host:3306/database')}\n\n${color.bold('SQLite')}:\n ${color.dim('/path/to/database.sqlite')}`,
+ 'Connection String'
+ );
+
+ const input = await p.text({
+ message: 'Enter your database connection string',
+ placeholder: 'postgresql://user:password@host:5432/database',
+ validate: (value) => {
+ if (!value || value.trim() === '') {
+ return 'Connection string is required';
+ }
+ if (!isValidConnectionString(value)) {
+ return 'Must be a valid database connection string (postgresql://, mysql://, or a file path)';
+ }
+ },
+ });
+
+ if (p.isCancel(input)) {
+ p.cancel('Export cancelled.');
+ process.exit(0);
+ }
+
+ dbUrl = input;
+ }
+
+ const spinner = p.spinner();
+ spinner.start('Connecting to database...');
+
+ try {
+ const result = await exportBetterAuthUsers(dbUrl, outputFile);
+ spinner.stop(`Found ${result.userCount} users`);
+
+ displayBetterAuthExportSummary(result);
+
+ p.log.info(
+ color.dim(
+ `Next step: run ${color.bold('bun run migrate')} with file "exports/${outputFile}"`
+ )
+ );
+
+ p.outro(color.green('Export complete!'));
+ } catch (err) {
+ spinner.stop('Export failed');
+ const message = err instanceof Error ? err.message : String(err);
+ p.log.error(color.red(message));
+ process.exit(1);
+ }
+}
diff --git a/src/export/clerk.ts b/src/export/clerk.ts
new file mode 100644
index 0000000..2f368eb
--- /dev/null
+++ b/src/export/clerk.ts
@@ -0,0 +1,286 @@
+/**
+ * Clerk user export module
+ *
+ * Exports users from a Clerk instance to a JSON file compatible with the
+ * migration script's Clerk transformer for instance-to-instance migration.
+ *
+ * Note: The Clerk API does not return sensitive fields like passwords,
+ * TOTP secrets, or backup codes. Only `passwordEnabled`, `totpEnabled`,
+ * and `backupCodeEnabled` booleans are available.
+ *
+ * Usage:
+ * bun run export:clerk
+ * bun run export:clerk -- --output my-users.json
+ */
+import 'dotenv/config';
+import { createClerkClient } from '@clerk/backend';
+import type { User } from '@clerk/backend';
+import * as p from '@clack/prompts';
+import color from 'picocolors';
+import {
+ displayFieldCoverage,
+ getDateTimeStamp,
+ writeExportOutput,
+} from '../lib';
+import { env } from '../envs-constants';
+import { closeAllStreams, exportLogger } from '../logger';
+import type { BaseExportResult } from '../types';
+
+const LIMIT = 500;
+
+/**
+ * Fetches every user from the Clerk instance, paging LIMIT users at a time.
+ *
+ * The accumulator/offset parameters of the original recursive version are
+ * kept (both defaulted) so the signature is unchanged, but the body is now
+ * iterative and creates the Clerk client once instead of once per page.
+ *
+ * @param allUsers - Accumulator for collected users
+ * @param offset - Pagination offset to start from
+ * @returns Array of all Clerk User objects
+ */
+async function fetchAllUsers(
+  allUsers: User[] = [],
+  offset: number = 0
+): Promise<User[]> {
+  // Build the client a single time; the previous recursive implementation
+  // re-created it on every page.
+  const clerk = createClerkClient({ secretKey: env.CLERK_SECRET_KEY });
+  let currentOffset = offset;
+
+  for (;;) {
+    const { data } = await clerk.users.getUserList({
+      offset: currentOffset,
+      limit: LIMIT,
+    });
+    allUsers.push(...data);
+
+    // A short page signals the end of the list.
+    if (data.length < LIMIT) {
+      return allUsers;
+    }
+    currentOffset += LIMIT;
+  }
+}
+
+/**
+ * Maps a Clerk User object to the export format compatible with the migration schema.
+ *
+ * Categorizes emails and phones by verification status, placing primary identifiers
+ * first. Only includes fields that have values.
+ *
+ * @param user - A Clerk User object from the API
+ * @returns A record with migration-compatible field names
+ */
+export function mapUserToExport(user: User): Record<string, unknown> {
+  const exported: Record<string, unknown> = {};
+
+  exported.userId = user.id;
+
+  // Categorize emails by verification status
+  const verifiedEmails: string[] = [];
+  const unverifiedEmails: string[] = [];
+  for (const ea of user.emailAddresses) {
+    if (ea.verification?.status === 'verified') {
+      verifiedEmails.push(ea.emailAddress);
+    } else {
+      unverifiedEmails.push(ea.emailAddress);
+    }
+  }
+
+  // Primary email first, then additional verified emails
+  const primaryEmailAddr = user.primaryEmailAddress?.emailAddress;
+  if (primaryEmailAddr) {
+    exported.email = primaryEmailAddr;
+    const additional = verifiedEmails.filter((e) => e !== primaryEmailAddr);
+    if (additional.length > 0) {
+      exported.emailAddresses = additional;
+    }
+  } else if (verifiedEmails.length > 0) {
+    exported.email = verifiedEmails[0];
+    if (verifiedEmails.length > 1) {
+      exported.emailAddresses = verifiedEmails.slice(1);
+    }
+  }
+  if (unverifiedEmails.length > 0) {
+    exported.unverifiedEmailAddresses = unverifiedEmails;
+  }
+
+  // Categorize phones by verification status
+  const verifiedPhones: string[] = [];
+  const unverifiedPhones: string[] = [];
+  for (const pn of user.phoneNumbers) {
+    if (pn.verification?.status === 'verified') {
+      verifiedPhones.push(pn.phoneNumber);
+    } else {
+      unverifiedPhones.push(pn.phoneNumber);
+    }
+  }
+
+  // Primary phone first, then additional verified phones
+  const primaryPhoneNum = user.primaryPhoneNumber?.phoneNumber;
+  if (primaryPhoneNum) {
+    exported.phone = primaryPhoneNum;
+    const additional = verifiedPhones.filter((ph) => ph !== primaryPhoneNum);
+    if (additional.length > 0) {
+      exported.phoneNumbers = additional;
+    }
+  } else if (verifiedPhones.length > 0) {
+    exported.phone = verifiedPhones[0];
+    if (verifiedPhones.length > 1) {
+      exported.phoneNumbers = verifiedPhones.slice(1);
+    }
+  }
+  if (unverifiedPhones.length > 0) {
+    exported.unverifiedPhoneNumbers = unverifiedPhones;
+  }
+
+  // Simple fields
+  if (user.username) exported.username = user.username;
+  if (user.firstName) exported.firstName = user.firstName;
+  if (user.lastName) exported.lastName = user.lastName;
+
+  // Metadata (include only when non-empty)
+  if (Object.keys(user.publicMetadata).length > 0) {
+    exported.publicMetadata = user.publicMetadata;
+  }
+  if (Object.keys(user.privateMetadata).length > 0) {
+    exported.privateMetadata = user.privateMetadata;
+  }
+  if (Object.keys(user.unsafeMetadata).length > 0) {
+    exported.unsafeMetadata = user.unsafeMetadata;
+  }
+
+  // Clerk API fields
+  if (user.banned) exported.banned = user.banned;
+  exported.createOrganizationEnabled = user.createOrganizationEnabled;
+  if (user.createOrganizationsLimit !== null) {
+    exported.createOrganizationsLimit = user.createOrganizationsLimit;
+  }
+  exported.deleteSelfEnabled = user.deleteSelfEnabled;
+
+  // Timestamps: convert from unix ms to ISO string
+  if (user.createdAt) {
+    exported.createdAt = new Date(user.createdAt).toISOString();
+  }
+  if (user.legalAcceptedAt) {
+    exported.legalAcceptedAt = new Date(user.legalAcceptedAt).toISOString();
+  }
+
+  return exported;
+}
+
+/** Result of exportClerkUsers: base count/path plus per-field coverage counters. */
+interface ClerkExportResult extends BaseExportResult {
+  fieldCoverage: {
+    email: number;
+    username: number;
+    firstName: number;
+    lastName: number;
+    phone: number;
+    // Counts users with passwordEnabled — the Clerk API never returns the hash itself.
+    password: number;
+  };
+}
+
+/**
+ * Exports all users from a Clerk instance to a JSON file.
+ *
+ * Fetches all users via pagination, maps them to the migration schema format,
+ * and writes the result to the exports/ directory.
+ *
+ * @param outputFile - Output file name (written inside exports/ directory)
+ * @returns Export result with user count, output path, and field coverage stats
+ */
+export async function exportClerkUsers(
+  outputFile: string
+): Promise<ClerkExportResult> {
+  const allUsers = await fetchAllUsers();
+  const dateTime = getDateTimeStamp();
+  const exportedUsers: Record<string, unknown>[] = [];
+
+  // Per-field counters for the coverage summary.
+  const coverage = {
+    email: 0,
+    username: 0,
+    firstName: 0,
+    lastName: 0,
+    phone: 0,
+    password: 0,
+  };
+
+  for (const user of allUsers) {
+    try {
+      const mapped = mapUserToExport(user);
+      exportedUsers.push(mapped);
+
+      // Track field coverage
+      if (mapped.email) coverage.email++;
+      if (mapped.username) coverage.username++;
+      if (mapped.firstName) coverage.firstName++;
+      if (mapped.lastName) coverage.lastName++;
+      if (mapped.phone) coverage.phone++;
+      // Only the boolean flag is available; the hash is not exported.
+      if (user.passwordEnabled) coverage.password++;
+
+      exportLogger({ userId: user.id, status: 'success' }, dateTime);
+    } catch (err) {
+      // Log the failed user and continue with the rest of the export.
+      const message = err instanceof Error ? err.message : String(err);
+      exportLogger(
+        { userId: user.id, status: 'error', error: message },
+        dateTime
+      );
+    }
+  }
+
+  closeAllStreams();
+
+  const outputPath = writeExportOutput(exportedUsers, outputFile);
+
+  return {
+    userCount: exportedUsers.length,
+    outputPath,
+    fieldCoverage: coverage,
+  };
+}
+
+/**
+ * Prints the Clerk export results as a field coverage report.
+ *
+ * @param result - Export result containing user count, output path, and per-field coverage stats
+ */
+export function displayClerkExportSummary(result: ClerkExportResult): void {
+  const { fieldCoverage: fc, userCount, outputPath } = result;
+
+  const rows = [
+    { label: 'have email', count: fc.email },
+    { label: 'have username', count: fc.username },
+    { label: 'have first name', count: fc.firstName },
+    { label: 'have last name', count: fc.lastName },
+    { label: 'have phone', count: fc.phone },
+    { label: 'have password (passwordEnabled)', count: fc.password },
+  ];
+
+  displayFieldCoverage(rows, userCount, outputPath);
+}
+
+/**
+ * Parses the --output flag from CLI arguments
+ * @returns The output file name or undefined if not provided
+ */
+function parseOutputArg(): string | undefined {
+ const args = process.argv.slice(2);
+ for (let i = 0; i < args.length; i++) {
+ if (args[i] === '--output' && args[i + 1]) {
+ return args[i + 1];
+ }
+ }
+ return undefined;
+}
+
+/**
+ * CLI wrapper for the Clerk export command
+ *
+ * Displays an interactive CLI with spinner, fetches all users,
+ * and writes them to a JSON file in exports/. Exits with code 1 on failure.
+ */
+export async function runClerkExport(): Promise<void> {
+  p.intro(color.bgCyan(color.black('Clerk User Export')));
+
+  // --output flag wins; otherwise fall back to the default file name.
+  const outputFile = parseOutputArg() || 'clerk-export.json';
+
+  const spinner = p.spinner();
+  spinner.start('Fetching users from Clerk...');
+
+  try {
+    const result = await exportClerkUsers(outputFile);
+    spinner.stop(`Found ${result.userCount} users`);
+    displayClerkExportSummary(result);
+    p.log.info(
+      color.dim(
+        `Next step: run ${color.bold('bun run migrate')} and select "Clerk" with file "exports/${outputFile}"`
+      )
+    );
+    p.outro(color.green('Export complete!'));
+  } catch (err) {
+    spinner.stop('Export failed');
+    const message = err instanceof Error ? err.message : String(err);
+    p.log.error(color.red(message));
+    process.exit(1);
+  }
+}
diff --git a/src/export/firebase.ts b/src/export/firebase.ts
new file mode 100644
index 0000000..819bd1f
--- /dev/null
+++ b/src/export/firebase.ts
@@ -0,0 +1,269 @@
+/**
+ * Firebase user export module
+ *
+ * Exports users from a Firebase project via the Admin SDK to a JSON file
+ * compatible with the migration script's Firebase transformer.
+ *
+ * Uses admin.auth().listUsers() for paginated user fetching. The Admin SDK
+ * returns UserRecord objects including passwordHash and passwordSalt fields
+ * when available.
+ *
+ * Usage:
+ * bun run export:firebase
+ * bun run export:firebase -- --service-account ./service-account.json --output users.json
+ *
+ * Environment variables:
+ * GOOGLE_APPLICATION_CREDENTIALS - Path to Firebase service account JSON key file
+ *
+ * Priority: --service-account flag > GOOGLE_APPLICATION_CREDENTIALS env var > interactive prompt
+ */
+import 'dotenv/config';
+import fs from 'fs';
+import * as admin from 'firebase-admin';
+import * as p from '@clack/prompts';
+import color from 'picocolors';
+import {
+ displayFieldCoverage,
+ getDateTimeStamp,
+ writeExportOutput,
+} from '../lib';
+import { closeAllStreams, exportLogger } from '../logger';
+import type { BaseExportResult } from '../types';
+
+const PAGE_SIZE = 1000;
+
+/** Result of exportFirebaseUsers: base count/path plus per-field coverage counters. */
+interface FirebaseExportResult extends BaseExportResult {
+  fieldCoverage: {
+    email: number;
+    emailVerified: number;
+    // Users whose passwordHash came back from the Admin SDK.
+    passwordHash: number;
+    phone: number;
+    displayName: number;
+  };
+}
+
+/**
+ * Maps a Firebase UserRecord to the export format expected by the Firebase transformer.
+ *
+ * @param user - A Firebase UserRecord from the Admin SDK
+ * @returns A record with Firebase transformer-compatible field names
+ */
+export function mapFirebaseUserToExport(
+ user: admin.auth.UserRecord
+): Record {
+ const exported: Record = {};
+
+ exported.localId = user.uid;
+ if (user.email) exported.email = user.email;
+ exported.emailVerified = user.emailVerified;
+ if (user.passwordHash) exported.passwordHash = user.passwordHash;
+ if (user.passwordSalt) exported.passwordSalt = user.passwordSalt;
+ if (user.displayName) exported.displayName = user.displayName;
+ if (user.phoneNumber) exported.phoneNumber = user.phoneNumber;
+ if (user.disabled) exported.disabled = user.disabled;
+ if (user.metadata.creationTime)
+ exported.createdAt = new Date(user.metadata.creationTime).getTime();
+ if (user.metadata.lastSignInTime)
+ exported.lastSignedInAt = new Date(user.metadata.lastSignInTime).getTime();
+
+ return exported;
+}
+
+/**
+ * Exports all users from a Firebase project to a JSON file.
+ *
+ * Fetches users via the Admin SDK with pagination (1000 per page),
+ * maps them to the Firebase transformer format, and writes to exports/.
+ * The app is always deleted afterwards, even on failure.
+ *
+ * @param serviceAccountPath - Path to the Firebase service account JSON key file
+ * @param outputFile - Output file name (written inside exports/ directory)
+ * @returns Export result with user count, output path, and field coverage stats
+ */
+export async function exportFirebaseUsers(
+  serviceAccountPath: string,
+  outputFile: string
+): Promise<FirebaseExportResult> {
+  const serviceAccount = JSON.parse(
+    fs.readFileSync(serviceAccountPath, 'utf-8')
+  ) as admin.ServiceAccount;
+
+  const app = admin.initializeApp({
+    credential: admin.credential.cert(serviceAccount),
+  });
+
+  try {
+    const dateTime = getDateTimeStamp();
+    const allUsers: Record<string, unknown>[] = [];
+
+    // Per-field counters for the coverage summary.
+    const coverage = {
+      email: 0,
+      emailVerified: 0,
+      passwordHash: 0,
+      phone: 0,
+      displayName: 0,
+    };
+
+    let pageToken: string | undefined;
+
+    do {
+      // Scope the Auth call to the app we just initialized rather than the
+      // implicit default app.
+      const listResult = await app.auth().listUsers(PAGE_SIZE, pageToken);
+
+      for (const user of listResult.users) {
+        const mapped = mapFirebaseUserToExport(user);
+        allUsers.push(mapped);
+
+        if (mapped.email) coverage.email++;
+        if (mapped.emailVerified) coverage.emailVerified++;
+        if (mapped.passwordHash) coverage.passwordHash++;
+        if (mapped.phoneNumber) coverage.phone++;
+        if (mapped.displayName) coverage.displayName++;
+
+        exportLogger({ userId: user.uid, status: 'success' }, dateTime);
+      }
+
+      pageToken = listResult.pageToken;
+    } while (pageToken);
+
+    closeAllStreams();
+
+    const outputPath = writeExportOutput(allUsers, outputFile);
+
+    return {
+      userCount: allUsers.length,
+      outputPath,
+      fieldCoverage: coverage,
+    };
+  } finally {
+    // Release the Admin SDK app so the process can exit cleanly.
+    await app.delete();
+  }
+}
+
+/**
+ * Prints the Firebase export results as a field coverage report.
+ *
+ * @param result - Export result containing user count, output path, and per-field coverage stats
+ */
+export function displayFirebaseExportSummary(
+  result: FirebaseExportResult
+): void {
+  const { fieldCoverage: fc, userCount, outputPath } = result;
+
+  const rows = [
+    { label: 'have email', count: fc.email },
+    { label: 'email verified', count: fc.emailVerified },
+    { label: 'have password hash', count: fc.passwordHash },
+    { label: 'have phone', count: fc.phone },
+    { label: 'have display name', count: fc.displayName },
+  ];
+
+  displayFieldCoverage(rows, userCount, outputPath);
+
+  // Zero hashes across a non-empty export usually points at a
+  // credential-scope problem, so surface a hint.
+  if (userCount > 0 && fc.passwordHash === 0) {
+    p.log.info(
+      color.dim(
+        'No password hashes found. Ensure you are using a project-level service account.'
+      )
+    );
+  }
+}
+
+/**
+ * Parses Firebase-specific CLI flags (--service-account, --output) from
+ * process arguments.
+ *
+ * @returns Object containing whichever flag values were present
+ */
+function parseFirebaseArgs(): {
+  serviceAccount?: string;
+  output?: string;
+} {
+  const args = process.argv.slice(2);
+  const result: { serviceAccount?: string; output?: string } = {};
+
+  for (let i = 0; i < args.length; i++) {
+    if (args[i] === '--service-account' && args[i + 1]) {
+      result.serviceAccount = args[i + 1];
+      i++; // skip the flag's value
+    } else if (args[i] === '--output' && args[i + 1]) {
+      result.output = args[i + 1];
+      i++; // skip the flag's value
+    }
+  }
+
+  return result;
+}
+
+/**
+ * CLI wrapper for the Firebase export command.
+ *
+ * Resolves the service account path from CLI flags, env vars, or interactive prompt,
+ * then exports users to a JSON file. Exits with code 1 on failure.
+ */
+export async function runFirebaseExport(): Promise<void> {
+  p.intro(color.bgCyan(color.black('Firebase User Export')));
+
+  const cliArgs = parseFirebaseArgs();
+  const env = process.env as Record<string, string | undefined>;
+
+  // Resolution priority: --service-account flag > env var > interactive prompt.
+  let serviceAccountPath =
+    cliArgs.serviceAccount || env.GOOGLE_APPLICATION_CREDENTIALS;
+  const outputFile = cliArgs.output || 'firebase-export.json';
+
+  // Prompt for service account path if not resolved
+  if (!serviceAccountPath) {
+    const input = await p.text({
+      message: 'Enter the path to your Firebase service account JSON key file',
+      placeholder: './service-account.json',
+      validate: (value) => {
+        if (!value || value.trim() === '') {
+          return 'Service account path is required';
+        }
+        if (!fs.existsSync(value)) {
+          return `File not found: ${value}`;
+        }
+      },
+    });
+    if (p.isCancel(input)) {
+      p.cancel('Export cancelled.');
+      process.exit(0);
+    }
+    serviceAccountPath = input;
+  }
+
+  // Validate the file exists and is valid JSON (covers the flag/env paths,
+  // which skip the prompt's validate callback)
+  if (!fs.existsSync(serviceAccountPath)) {
+    p.log.error(color.red(`File not found: ${serviceAccountPath}`));
+    process.exit(1);
+  }
+
+  try {
+    JSON.parse(fs.readFileSync(serviceAccountPath, 'utf-8'));
+  } catch {
+    p.log.error(
+      color.red(`Invalid JSON in service account file: ${serviceAccountPath}`)
+    );
+    process.exit(1);
+  }
+
+  const spinner = p.spinner();
+  spinner.start('Fetching users from Firebase...');
+
+  try {
+    const result = await exportFirebaseUsers(serviceAccountPath, outputFile);
+    spinner.stop(`Found ${result.userCount} users`);
+
+    displayFirebaseExportSummary(result);
+
+    p.log.info(
+      color.dim(
+        `Next step: run ${color.bold('bun run migrate')} and select "Firebase" with file "exports/${outputFile}"`
+      )
+    );
+
+    p.outro(color.green('Export complete!'));
+  } catch (err) {
+    spinner.stop('Export failed');
+    const message = err instanceof Error ? err.message : String(err);
+    p.log.error(color.red(message));
+    process.exit(1);
+  }
+}
diff --git a/src/export/index.ts b/src/export/index.ts
index 1863c67..9e50eee 100644
--- a/src/export/index.ts
+++ b/src/export/index.ts
@@ -1,93 +1,67 @@
/**
- * Supabase user export CLI
+ * Export entry point
*
- * Exports users from a Supabase Postgres database to a JSON file
- * compatible with the migration script's Supabase transformer.
+ * Dispatches to the appropriate platform-specific export based on
+ * CLI flags or interactive selection. Reads available exports from
+ * the registry in registry.ts.
*
* Usage:
- * bun run export:supabase
- * bun run export:supabase --db-url postgresql://... --output users.json
- *
- * Environment variables:
- * SUPABASE_DB_URL - Postgres connection string
- *
- * Priority: --db-url flag > SUPABASE_DB_URL env var > interactive prompt
+ * bun run export # Interactive platform picker
+ * bun run export -- --platform auth0 # Direct Auth0 export
+ * bun run export -- --platform authjs # Direct AuthJS export
+ * bun run export -- --platform betterauth # Direct Better Auth export
+ * bun run export -- --platform clerk # Direct Clerk export
+ * bun run export -- --platform firebase # Direct Firebase export
+ * bun run export -- --platform supabase # Direct Supabase export
*/
import 'dotenv/config';
import * as p from '@clack/prompts';
-import color from 'picocolors';
-import { displayExportSummary, exportSupabaseUsers } from './supabase';
-import { isValidConnectionString, resolveConnectionString } from '../utils';
-
-async function main() {
- p.intro(color.bgCyan(color.black('Supabase User Export')));
-
- const {
- dbUrl: resolvedUrl,
- outputFile,
- warning,
- } = resolveConnectionString(
- process.argv.slice(2),
- process.env as Record
- );
+import { exports } from './registry';
- let dbUrl = resolvedUrl;
-
- if (warning) {
- p.log.warn(color.yellow(warning));
+/**
+ * Parses the --platform flag from CLI arguments
+ * @returns The platform value or undefined if not provided
+ */
+function parsePlatformArg(): string | undefined {
+ const args = process.argv.slice(2);
+ for (let i = 0; i < args.length; i++) {
+ if (args[i] === '--platform' && args[i + 1]) {
+ return args[i + 1];
+ }
}
+ return undefined;
+}
+
+async function main() {
+  const platformArg = parsePlatformArg();
-  // Prompt for connection string if not resolved from flag or env
-  if (!dbUrl) {
-    p.note(
-      `Find this in the Supabase Dashboard by clicking the ${color.bold('Connect')} button.\n\n${color.bold('Direct connection')} (requires IPv4 add-on):\n ${color.dim('postgresql://postgres:[PASSWORD]@db.[REF].supabase.co:5432/postgres')}\n\n${color.bold('Pooler connection')} (works without IPv4 add-on):\n ${color.dim('postgres://postgres.[REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:6543/postgres')}`,
-      'Connection String'
-    );
+  let platform = platformArg;
-    const input = await p.text({
-      message: 'Enter your Supabase Postgres connection string',
-      placeholder:
-        'postgresql://postgres:[PASSWORD]@db.[REF].supabase.co:5432/postgres',
-      validate: (value) => {
-        if (!value || value.trim() === '') {
-          return 'Connection string is required';
-        }
-        if (!isValidConnectionString(value)) {
-          return 'Must be a valid Postgres connection string (postgresql://...)';
-        }
-      },
+  // No --platform flag: show an interactive picker built from the registry.
+  if (!platform) {
+    const selected = await p.select({
+      message: 'Which platform would you like to export from?',
+      options: exports.map((e) => ({
+        value: e.key,
+        label: e.label,
+        description: e.description,
+      })),
    });
-    if (p.isCancel(input)) {
+    if (p.isCancel(selected)) {
      p.cancel('Export cancelled.');
      process.exit(0);
    }
-
-    dbUrl = input;
+    platform = selected;
  }
-  const spinner = p.spinner();
-  spinner.start('Connecting to Supabase database...');
+  // Resolve the chosen platform in the registry and dispatch to its runner.
+  const entry = exports.find((e) => e.key === platform);
-  try {
-    const result = await exportSupabaseUsers(dbUrl, outputFile);
-    spinner.stop(`Found ${result.userCount} users`);
-
-    displayExportSummary(result);
-
-    p.log.info(
-      color.dim(
-        `Next step: run ${color.bold('bun run migrate')} and select "Supabase" with file "${outputFile}"`
-      )
-    );
-
-    p.outro(color.green('Export complete!'));
-  } catch (err) {
-    spinner.stop('Export failed');
-    const message = err instanceof Error ? err.message : String(err);
-    p.log.error(color.red(message));
+  if (!entry) {
+    p.log.error(`Unknown platform: ${platform}`);
    process.exit(1);
  }
+
+  await entry.run();
}
void main();
diff --git a/src/export/registry.ts b/src/export/registry.ts
new file mode 100644
index 0000000..2b4ee9e
--- /dev/null
+++ b/src/export/registry.ts
@@ -0,0 +1,58 @@
+/**
+ * Export registry
+ *
+ * Central registration of all available export modules. The dispatcher
+ * in index.ts reads from this array to build the interactive picker
+ * and resolve --platform flags.
+ *
+ * To add a new export:
+ * 1. Create src/export/[platform].ts with a runXxxExport function
+ * 2. Import and register it here
+ * 3. Add an "export:[platform]" script to package.json
+ */
+import type { ExportRegistryEntry } from '../types';
+import { runAuth0Export } from './auth0';
+import { runAuthJSExport } from './authjs';
+import { runBetterAuthExport } from './betterauth';
+import { runClerkExport } from './clerk';
+import { runFirebaseExport } from './firebase';
+import { runSupabaseExport } from './supabase';
+
+// Ordered list of registered exports; the interactive picker in index.ts
+// maps this array directly into its options, so entry order controls
+// display order. `key` is what the --platform flag matches against.
+export const exports: ExportRegistryEntry[] = [
+  {
+    key: 'auth0',
+    label: 'Auth0',
+    description: 'Export users from your Auth0 tenant',
+    run: runAuth0Export,
+  },
+  {
+    key: 'authjs',
+    label: 'AuthJS (Next-Auth)',
+    description: 'Export users from an AuthJS database',
+    run: runAuthJSExport,
+  },
+  {
+    key: 'betterauth',
+    label: 'Better Auth',
+    description: 'Export users from a Better Auth database',
+    run: runBetterAuthExport,
+  },
+  {
+    key: 'clerk',
+    label: 'Clerk',
+    description: 'Export users from your Clerk instance',
+    run: runClerkExport,
+  },
+  {
+    key: 'firebase',
+    label: 'Firebase',
+    description: 'Export users from your Firebase project',
+    run: runFirebaseExport,
+  },
+  {
+    key: 'supabase',
+    label: 'Supabase',
+    description: 'Export users from a Supabase Postgres database',
+    run: runSupabaseExport,
+  },
+];
diff --git a/src/export/supabase.ts b/src/export/supabase.ts
index ce2c80b..b04af64 100644
--- a/src/export/supabase.ts
+++ b/src/export/supabase.ts
@@ -8,12 +8,27 @@
* - encrypted_password (bcrypt hashes) — not available via Supabase Admin API
* - first_name extracted from raw_user_meta_data.display_name
* - All standard auth fields (email, phone, confirmation status, metadata)
+ *
+ * Usage:
+ * bun run export:supabase
+ * bun run export:supabase -- --db-url postgresql://... --output users.json
+ *
+ * Environment variables:
+ * SUPABASE_DB_URL - Postgres connection string
+ *
+ * Priority: --db-url flag > SUPABASE_DB_URL env var > interactive prompt
*/
import { Client } from 'pg';
-import fs from 'fs';
-import path from 'path';
import * as p from '@clack/prompts';
import color from 'picocolors';
+import {
+ displayFieldCoverage,
+ getDbConnectionErrorHint,
+ isValidConnectionString,
+ resolveConnectionString,
+ writeExportOutput,
+} from '../lib';
+import type { BaseExportResult } from '../types';
/**
* SQL query that exports users in the format expected by the Supabase transformer.
@@ -42,9 +57,7 @@ const EXPORT_QUERY = `
ORDER BY created_at
`;
-interface ExportResult {
- userCount: number;
- outputPath: string;
+interface SupabaseExportResult extends BaseExportResult {
fieldCoverage: {
email: number;
emailConfirmed: number;
@@ -65,33 +78,14 @@ interface ExportResult {
export async function exportSupabaseUsers(
dbUrl: string,
outputFile: string
-): Promise {
+): Promise {
const client = new Client({ connectionString: dbUrl });
try {
await client.connect();
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
- let hint: string;
- if (message.includes('ENOTFOUND')) {
- hint =
- 'The hostname could not be resolved. Check the project ref in your connection string.';
- } else if (
- message.includes('ETIMEDOUT') ||
- message.includes('ENETUNREACH')
- ) {
- hint =
- 'Direct connections require the IPv4 add-on. Use a pooler connection instead,\n' +
- 'or enable IPv4 in Supabase Dashboard → Settings → Add-Ons.';
- } else if (
- message.includes('authentication failed') ||
- message.includes('password')
- ) {
- hint = 'Check the password in your connection string.';
- } else {
- hint =
- 'Verify your connection string and ensure your Supabase project is accessible.';
- }
+ const hint = getDbConnectionErrorHint(message, 'supabase');
throw new Error(
`Failed to connect to Supabase database: ${message}\n\n${hint}`
);
@@ -146,12 +140,7 @@ export async function exportSupabaseUsers(
if (row.last_name) coverage.lastName++;
}
- // Write output
- const outputPath = path.isAbsolute(outputFile)
- ? outputFile
- : path.join(process.cwd(), outputFile);
-
- fs.writeFileSync(outputPath, JSON.stringify(rows, null, 2));
+ const outputPath = writeExportOutput(rows, outputFile);
return {
userCount: rows.length,
@@ -166,33 +155,98 @@ export async function exportSupabaseUsers(
/**
* Displays the export results as a field coverage report and success message.
*
- * Shows each field with an icon indicating coverage level:
- * - ● green — all users have this field
- * - ○ yellow — some users have this field
- * - ○ dim — no users have this field
- *
* @param result - Export result containing user count, output path, and per-field coverage stats
*/
-export function displayExportSummary(result: ExportResult): void {
+export function displayExportSummary(result: SupabaseExportResult): void {
const { userCount, outputPath, fieldCoverage } = result;
- /** Returns a colored icon based on how many users have a given field. */
- const getCoverageIcon = (count: number, total: number): string => {
- if (count === total) return color.green('●');
- if (count > 0) return color.yellow('○');
- return color.dim('○');
- };
+ displayFieldCoverage(
+ [
+ { label: 'have email', count: fieldCoverage.email },
+ { label: 'email confirmed', count: fieldCoverage.emailConfirmed },
+ { label: 'have password hash', count: fieldCoverage.password },
+ { label: 'have phone', count: fieldCoverage.phone },
+ { label: 'have first name', count: fieldCoverage.firstName },
+ { label: 'have last name', count: fieldCoverage.lastName },
+ ],
+ userCount,
+ outputPath
+ );
+}
- let summary = '';
- summary += `${getCoverageIcon(fieldCoverage.email, userCount)} ${color.dim(`${fieldCoverage.email}/${userCount} have email`)}\n`;
- summary += `${getCoverageIcon(fieldCoverage.emailConfirmed, userCount)} ${color.dim(`${fieldCoverage.emailConfirmed}/${userCount} email confirmed`)}\n`;
- summary += `${getCoverageIcon(fieldCoverage.password, userCount)} ${color.dim(`${fieldCoverage.password}/${userCount} have password hash`)}\n`;
- summary += `${getCoverageIcon(fieldCoverage.phone, userCount)} ${color.dim(`${fieldCoverage.phone}/${userCount} have phone`)}\n`;
- summary += `${getCoverageIcon(fieldCoverage.firstName, userCount)} ${color.dim(`${fieldCoverage.firstName}/${userCount} have first name`)}\n`;
- summary += `${getCoverageIcon(fieldCoverage.lastName, userCount)} ${color.dim(`${fieldCoverage.lastName}/${userCount} have last name`)}`;
-
- p.note(summary, 'Field Coverage');
- p.log.success(
- `Exported ${color.bold(String(userCount))} users to ${color.dim(outputPath)}`
+/**
+ * CLI wrapper for the Supabase export command
+ *
+ * Prompts for a connection string if not provided via --db-url flag or
+ * SUPABASE_DB_URL environment variable, then exports users to a JSON file.
+ * Exits with code 1 on failure.
+ */
+export async function runSupabaseExport(): Promise<void> {
+  p.intro(color.bgCyan(color.black('Supabase User Export')));
+
+  // Resolution priority: --db-url flag > SUPABASE_DB_URL env var > prompt.
+  const {
+    dbUrl: resolvedUrl,
+    outputFile,
+    warning,
+  } = resolveConnectionString(
+    process.argv.slice(2),
+    process.env as Record<string, string | undefined>
  );
+
+  let dbUrl = resolvedUrl;
+
+  if (warning) {
+    p.log.warn(color.yellow(warning));
+  }
+
+  // Prompt for connection string if not resolved from flag or env
+  if (!dbUrl) {
+    p.note(
+      `Find this in the Supabase Dashboard by clicking the ${color.bold('Connect')} button.\n\n${color.bold('Direct connection')} (requires IPv4 add-on):\n ${color.dim('postgresql://postgres:[PASSWORD]@db.[REF].supabase.co:5432/postgres')}\n\n${color.bold('Pooler connection')} (works without IPv4 add-on):\n ${color.dim('postgres://postgres.[REF]:[PASSWORD]@aws-0-[REGION].pooler.supabase.com:6543/postgres')}`,
+      'Connection String'
+    );
+
+    const input = await p.text({
+      message: 'Enter your Supabase Postgres connection string',
+      placeholder:
+        'postgresql://postgres:[PASSWORD]@db.[REF].supabase.co:5432/postgres',
+      validate: (value) => {
+        if (!value || value.trim() === '') {
+          return 'Connection string is required';
+        }
+        if (!isValidConnectionString(value)) {
+          return 'Must be a valid Postgres connection string (postgresql://...)';
+        }
+      },
+    });
+
+    if (p.isCancel(input)) {
+      p.cancel('Export cancelled.');
+      process.exit(0);
+    }
+
+    dbUrl = input;
+  }
+
+  const spinner = p.spinner();
+  spinner.start('Connecting to Supabase database...');
+
+  try {
+    const result = await exportSupabaseUsers(dbUrl, outputFile);
+    spinner.stop(`Found ${result.userCount} users`);
+
+    displayExportSummary(result);
+
+    p.log.info(
+      color.dim(
+        `Next step: run ${color.bold('bun run migrate')} and select "Supabase" with file "exports/${outputFile}"`
+      )
+    );
+
+    p.outro(color.green('Export complete!'));
+  } catch (err) {
+    spinner.stop('Export failed');
+    const message = err instanceof Error ? err.message : String(err);
+    p.log.error(color.red(message));
+    process.exit(1);
+  }
}
diff --git a/src/lib/analysis.ts b/src/lib/analysis.ts
new file mode 100644
index 0000000..9934ef0
--- /dev/null
+++ b/src/lib/analysis.ts
@@ -0,0 +1,164 @@
+import type { FieldAnalysis, IdentifierCounts } from '../types';
+import { getDateTimeStamp } from './index';
+import { validationLogger } from '../logger';
+import { userSchema } from '../migrate/validator';
+import { transformers } from '../transformers';
+
+// Fields to analyze for the import (non-identifier fields).
+// `key` is the property name checked on each transformed user object;
+// `label` is the human-readable name reported in the field-analysis
+// results (presentOnAll / presentOnSome).
+export const ANALYZED_FIELDS = [
+  { key: 'firstName', label: 'First Name' },
+  { key: 'lastName', label: 'Last Name' },
+  { key: 'password', label: 'Password' },
+  { key: 'totpSecret', label: 'TOTP Secret' },
+];
+
+/**
+ * Determines whether a value carries meaningful content.
+ *
+ * Treats undefined, null, the empty string, and empty arrays as "no value".
+ * Everything else counts — including 0, false, and non-empty objects.
+ *
+ * @param value - The value to inspect
+ * @returns true when the value is present and non-empty
+ */
+export const hasValue = (value: unknown): boolean => {
+  // `== null` matches both null and undefined.
+  if (value == null || value === '') return false;
+  return Array.isArray(value) ? value.length > 0 : true;
+};
+
+/**
+ * Analyzes user data to determine field presence and identifier coverage
+ *
+ * Examines all users to count:
+ * - How many users have each field (firstName, lastName, password, totpSecret)
+ * - Identifier coverage (verified/unverified emails and phones, usernames)
+ * - Whether all users have at least one valid identifier
+ *
+ * Used to provide feedback about Dashboard configuration requirements.
+ *
+ * @param users - Array of user objects to analyze
+ * @returns Field analysis object with counts and identifier statistics
+ */
+export function analyzeFields(users: Record<string, unknown>[]): FieldAnalysis {
+  const totalUsers = users.length;
+
+  // Empty input: return a fully zeroed analysis.
+  if (totalUsers === 0) {
+    return {
+      presentOnAll: [],
+      presentOnSome: [],
+      identifiers: {
+        verifiedEmails: 0,
+        unverifiedEmails: 0,
+        verifiedPhones: 0,
+        unverifiedPhones: 0,
+        username: 0,
+        hasAnyIdentifier: 0,
+      },
+      totalUsers: 0,
+      fieldCounts: {},
+    };
+  }
+
+  const fieldCounts: Record<string, number> = {};
+  const identifiers: IdentifierCounts = {
+    verifiedEmails: 0,
+    unverifiedEmails: 0,
+    verifiedPhones: 0,
+    unverifiedPhones: 0,
+    username: 0,
+    hasAnyIdentifier: 0,
+  };
+
+  // Count how many users have each field
+  for (const user of users) {
+    // Count non-identifier fields
+    for (const field of ANALYZED_FIELDS) {
+      if (hasValue(user[field.key])) {
+        fieldCounts[field.key] = (fieldCounts[field.key] || 0) + 1;
+      }
+    }
+
+    // Count consolidated identifier fields (single + array variants)
+    const hasVerifiedEmail =
+      hasValue(user.email) || hasValue(user.emailAddresses);
+    const hasUnverifiedEmail = hasValue(user.unverifiedEmailAddresses);
+    const hasVerifiedPhone =
+      hasValue(user.phone) || hasValue(user.phoneNumbers);
+    const hasUnverifiedPhone = hasValue(user.unverifiedPhoneNumbers);
+    const hasUsername = hasValue(user.username);
+
+    if (hasVerifiedEmail) identifiers.verifiedEmails++;
+    if (hasUnverifiedEmail) identifiers.unverifiedEmails++;
+    if (hasVerifiedPhone) identifiers.verifiedPhones++;
+    if (hasUnverifiedPhone) identifiers.unverifiedPhones++;
+    if (hasUsername) identifiers.username++;
+
+    // A user is importable only with a verified email, verified phone,
+    // or username — unverified identifiers alone don't count.
+    if (hasVerifiedEmail || hasVerifiedPhone || hasUsername) {
+      identifiers.hasAnyIdentifier++;
+    }
+  }
+
+  const presentOnAll: string[] = [];
+  const presentOnSome: string[] = [];
+
+  for (const field of ANALYZED_FIELDS) {
+    const count = fieldCounts[field.key] || 0;
+    if (count === totalUsers) {
+      presentOnAll.push(field.label);
+    } else if (count > 0) {
+      presentOnSome.push(field.label);
+    }
+  }
+
+  return { presentOnAll, presentOnSome, identifiers, totalUsers, fieldCounts };
+}
+
+/**
+ * Validates users against the schema and logs validation errors.
+ *
+ * Runs before the readiness display so users can see the validation failure
+ * count and review the log file before confirming the migration.
+ *
+ * Applies transformer default fields (e.g., Supabase passwordHasher: "bcrypt")
+ * before validation to match the behavior of the full import pipeline.
+ *
+ * @param users - Array of transformed user objects from loadRawUsers()
+ * @param transformerKey - Transformer key to look up default fields
+ * @returns Object with validation failure count and log file path
+ */
+export function validateUsers(
+  users: Record<string, unknown>[],
+  transformerKey: string
+): { validationFailed: number; logFile: string } {
+  const dateTime = getDateTimeStamp();
+  const logFile = `migration-${dateTime}.log`;
+  let validationFailed = 0;
+
+  // Look up transformer defaults (e.g., Supabase adds passwordHasher: "bcrypt")
+  const transformer = transformers.find((obj) => obj.key === transformerKey);
+  const defaultFields =
+    transformer && 'defaults' in transformer ? transformer.defaults : null;
+
+  for (let i = 0; i < users.length; i++) {
+    // Defaults are spread after the user so they do not overwrite
+    // user-provided values... NOTE(review): spread order here puts defaults
+    // LAST, so they win over user fields — confirm this matches the import
+    // pipeline's precedence.
+    const user = defaultFields ? { ...users[i], ...defaultFields } : users[i];
+    const result = userSchema.safeParse(user);
+
+    if (!result.success) {
+      validationFailed++;
+      // Only the first issue per user is logged to keep the log readable.
+      const firstIssue = result.error.issues[0];
+      validationLogger(
+        {
+          error: firstIssue.message,
+          path: firstIssue.path as (string | number)[],
+          userId: (user.userId as string) || `row-${i}`,
+          row: i,
+        },
+        dateTime
+      );
+    }
+  }
+
+  return { validationFailed, logFile };
+}
diff --git a/src/lib/clerk.ts b/src/lib/clerk.ts
new file mode 100644
index 0000000..fe532d6
--- /dev/null
+++ b/src/lib/clerk.ts
@@ -0,0 +1,82 @@
+import { env } from '../envs-constants';
+
+export interface ClerkConfig {
+  // NOTE(review): generic parameters reconstructed after tag-stripping —
+  // entries are keyed by attribute / social-provider name; confirm the value
+  // shape against the Clerk GET /v1/environment response.
+  attributes: Partial<Record<string, { enabled: boolean }>>;
+  social: Partial<Record<string, { enabled: boolean }>>;
+}
+
+/**
+ * Decodes a Clerk publishable key to extract the frontend API hostname.
+ *
+ * Format: pk_test_ or pk_live_ followed by a base64 payload.
+ * The payload decodes to a hostname terminated by '$'.
+ *
+ * @param key - The Clerk publishable key
+ * @returns The frontend API hostname, or null if decoding fails
+ */
+function decodePublishableKey(key: string): string | null {
+  const hasKnownPrefix =
+    key.startsWith('pk_test_') || key.startsWith('pk_live_');
+  if (!hasKnownPrefix) return null;
+
+  try {
+    const payload = key.split('_')[2];
+    const hostname = Buffer.from(payload, 'base64').toString();
+    // A valid payload is a dotted hostname ending with the '$' sentinel.
+    return hostname.endsWith('$') && hostname.includes('.')
+      ? hostname.slice(0, -1)
+      : null;
+  } catch {
+    return null;
+  }
+}
+
+/**
+ * Fetches the Clerk instance configuration via the Frontend API.
+ *
+ * Decodes the publishable key to derive the FAPI hostname, then calls
+ * GET /v1/environment to retrieve auth settings, social connections,
+ * and user model configuration.
+ *
+ * Returns null on any failure (bad key, non-OK response, network error)
+ * so callers can degrade gracefully.
+ *
+ * @param publishableKey - The Clerk publishable key (pk_test_... or pk_live_...)
+ * @returns Clerk configuration with attributes and social connections, or null on failure
+ */
+export async function fetchClerkConfig(
+  publishableKey: string
+): Promise<ClerkConfig | null> {
+  const frontendApi = decodePublishableKey(publishableKey);
+  if (!frontendApi) return null;
+
+  try {
+    const res = await fetch(`https://${frontendApi}/v1/environment`);
+    if (!res.ok) return null;
+
+    // NOTE(review): response shape reconstructed after tag-stripping —
+    // verify the attribute/social value types against the live endpoint.
+    const data = (await res.json()) as {
+      user_settings?: {
+        attributes?: Record<string, { enabled: boolean }>;
+        social?: Record<string, { enabled: boolean }>;
+      };
+    };
+    const userSettings = data.user_settings;
+    if (!userSettings) return null;
+
+    return {
+      attributes: userSettings.attributes || {},
+      social: userSettings.social || {},
+    };
+  } catch {
+    return null;
+  }
+}
+
+/**
+ * Detects whether the Clerk instance is development or production based on the secret key
+ *
+ * @returns "dev" if the secret key starts with "sk_test_", otherwise "prod"
+ */
+export const detectInstanceType = (): 'dev' | 'prod' =>
+  env.CLERK_SECRET_KEY.startsWith('sk_test_') ? 'dev' : 'prod';
diff --git a/src/lib/db.ts b/src/lib/db.ts
new file mode 100644
index 0000000..ad3214b
--- /dev/null
+++ b/src/lib/db.ts
@@ -0,0 +1,137 @@
+/**
+ * Database abstraction layer
+ *
+ * Provides a unified interface for querying PostgreSQL, MySQL, and SQLite
+ * databases. Export modules use this to support multiple database types
+ * without needing to know which driver is in use.
+ *
+ * Usage:
+ * const client = await createDbClient('postgresql://...');
+ * const { rows } = await client.query('SELECT * FROM users');
+ * await client.end();
+ */
+import { Client as PgClient } from 'pg';
+
+export type DbType = 'postgres' | 'mysql' | 'sqlite';
+
+export interface DbClient {
+ query>(
+ sql: string,
+ params?: unknown[]
+ ): Promise<{ rows: T[] }>;
+ end(): Promise;
+ dbType: DbType;
+}
+
+/**
+ * Detects the database type from a connection string.
+ *
+ * - `postgresql://` or `postgres://` → postgres
+ * - `mysql://` or `mysql2://` → mysql
+ * - Anything else (file path, `file:`, `.sqlite`, `.db`) → sqlite
+ *
+ * @param connectionString - Database connection string or file path
+ * @returns The detected database type
+ */
+export function detectDbType(connectionString: string): DbType {
+  const normalized = connectionString.toLowerCase();
+  const prefixes: [string, DbType][] = [
+    ['postgresql://', 'postgres'],
+    ['postgres://', 'postgres'],
+    ['mysql://', 'mysql'],
+    ['mysql2://', 'mysql'],
+  ];
+  for (const [prefix, type] of prefixes) {
+    if (normalized.startsWith(prefix)) return type;
+  }
+  // No recognized URL scheme: assume a SQLite file path.
+  return 'sqlite';
+}
+
+/**
+ * Creates a database client for the given connection string.
+ *
+ * Automatically detects the database type and returns a unified client
+ * that normalizes query results to `{ rows: T[] }`.
+ *
+ * @param connectionString - Database connection string or file path
+ * @returns Connected database client
+ */
+export async function createDbClient(
+ connectionString: string
+): Promise {
+ const dbType = detectDbType(connectionString);
+
+ if (dbType === 'postgres') {
+ return createPostgresClient(connectionString);
+ }
+
+ if (dbType === 'mysql') {
+ return createMysqlClient(connectionString);
+ }
+
+ return createSqliteClient(connectionString);
+}
+
+async function createPostgresClient(
+ connectionString: string
+): Promise {
+ const client = new PgClient({ connectionString });
+ await client.connect();
+
+ return {
+ dbType: 'postgres',
+ async query>(
+ sql: string,
+ params?: unknown[]
+ ): Promise<{ rows: T[] }> {
+ const result = await client.query(sql, params);
+ return { rows: result.rows };
+ },
+ async end(): Promise {
+ await client.end();
+ },
+ };
+}
+
+async function createMysqlClient(connectionString: string): Promise {
+ const mysql = await import('mysql2/promise');
+ const connection = await mysql.createConnection(connectionString);
+
+ return {
+ dbType: 'mysql',
+ async query>(
+ sql: string,
+ params?: unknown[]
+ ): Promise<{ rows: T[] }> {
+ const [rows] = await connection.execute(sql, params);
+ return { rows: rows as T[] };
+ },
+ async end(): Promise {
+ await connection.end();
+ },
+ };
+}
+
+async function createSqliteClient(connectionString: string): Promise {
+ const BetterSqlite3 = (await import('better-sqlite3')).default;
+
+ // Strip file: prefix if present
+ let filePath = connectionString;
+ if (filePath.startsWith('file:')) {
+ filePath = filePath.slice(5);
+ }
+
+ const db = new BetterSqlite3(filePath);
+
+ return {
+ dbType: 'sqlite',
+ query>(
+ sql: string,
+ _params?: unknown[]
+ ): Promise<{ rows: T[] }> {
+ const rows = db.prepare(sql).all() as T[];
+ return Promise.resolve({ rows });
+ },
+ end(): Promise {
+ db.close();
+ return Promise.resolve();
+ },
+ };
+}
diff --git a/src/lib/export.ts b/src/lib/export.ts
new file mode 100644
index 0000000..4a8c138
--- /dev/null
+++ b/src/lib/export.ts
@@ -0,0 +1,127 @@
+/**
+ * Shared utilities for export modules
+ *
+ * Provides common functionality used across all platform-specific exports:
+ * - Coverage icon display
+ * - File writing
+ * - Field coverage summary display
+ * - Database connection error hints
+ */
+import fs from 'fs';
+import path from 'path';
+import * as p from '@clack/prompts';
+import color from 'picocolors';
+
+/**
+ * Maps field coverage to a colored circle icon.
+ *
+ * - green filled circle — every user has the field
+ * - yellow open circle — at least one (but not all) users have it
+ * - dim open circle — no users have it
+ *
+ * @param count - Number of users that have the field
+ * @param total - Total number of users
+ * @returns Colored icon string
+ */
+export function getCoverageIcon(count: number, total: number): string {
+  if (count === total) {
+    return color.green('●');
+  }
+  return count > 0 ? color.yellow('○') : color.dim('○');
+}
+
+/**
+ * Writes exported data to a JSON file in the exports/ directory.
+ *
+ * Creates the exports/ directory if it doesn't exist. Handles both
+ * absolute and relative file paths — relative paths are resolved
+ * inside the exports/ directory.
+ *
+ * @param data - Array of user data to write
+ * @param outputFile - Output file path (absolute or relative to exports/)
+ * @returns The resolved absolute output path
+ */
+export function writeExportOutput(data: unknown[], outputFile: string): string {
+ const exportsDir = path.join(process.cwd(), 'exports');
+ fs.mkdirSync(exportsDir, { recursive: true });
+
+ const outputPath = path.isAbsolute(outputFile)
+ ? outputFile
+ : path.join(exportsDir, outputFile);
+
+ fs.writeFileSync(outputPath, JSON.stringify(data, null, 2));
+
+ return outputPath;
+}
+
+/**
+ * Displays a field coverage summary using @clack/prompts.
+ *
+ * Renders one line per field — coverage icon plus a dimmed "count/total
+ * label" — then a success message with the output path.
+ *
+ * @param fields - Array of field labels and their counts
+ * @param userCount - Total number of exported users
+ * @param outputPath - Path where the export file was written
+ */
+export function displayFieldCoverage(
+  fields: { label: string; count: number }[],
+  userCount: number,
+  outputPath: string
+): void {
+  const lines: string[] = [];
+  for (const { label, count } of fields) {
+    const icon = getCoverageIcon(count, userCount);
+    lines.push(`${icon} ${color.dim(`${count}/${userCount} ${label}`)}`);
+  }
+
+  p.note(lines.join('\n'), 'Field Coverage');
+  p.log.success(
+    `Exported ${color.bold(String(userCount))} users to ${color.dim(outputPath)}`
+  );
+}
+
+/**
+ * Returns a human-readable hint for common database connection errors.
+ *
+ * Checks the error message for known patterns — DNS resolution failure,
+ * timeout / unreachable network, authentication failure — and returns
+ * platform-appropriate guidance, with a generic fallback otherwise.
+ *
+ * @param message - The error message from the connection attempt
+ * @param platform - Optional platform name for tailored hints ('supabase' | 'betterauth')
+ * @returns A hint string to help the user resolve the connection issue
+ */
+export function getDbConnectionErrorHint(
+  message: string,
+  platform?: 'supabase' | 'betterauth' | 'authjs'
+): string {
+  const isSupabase = platform === 'supabase';
+
+  if (message.includes('ENOTFOUND')) {
+    return isSupabase
+      ? 'The hostname could not be resolved. Check the project ref in your connection string.'
+      : 'The hostname could not be resolved. Check the host in your connection string.';
+  }
+
+  const unreachable =
+    message.includes('ETIMEDOUT') || message.includes('ENETUNREACH');
+  if (unreachable) {
+    if (isSupabase) {
+      return (
+        'Direct connections require the IPv4 add-on. Use a pooler connection instead,\n' +
+        'or enable IPv4 in Supabase Dashboard → Settings → Add-Ons.'
+      );
+    }
+    return 'The database server is unreachable. Check the host and port in your connection string.';
+  }
+
+  const looksLikeAuthFailure =
+    message.includes('authentication failed') || message.includes('password');
+  if (looksLikeAuthFailure) {
+    return 'Check the password in your connection string.';
+  }
+
+  return isSupabase
+    ? 'Verify your connection string and ensure your Supabase project is accessible.'
+    : 'Verify your connection string and ensure the database is accessible.';
+}
diff --git a/src/utils.ts b/src/lib/index.ts
similarity index 72%
rename from src/utils.ts
rename to src/lib/index.ts
index 31f041b..c386098 100644
--- a/src/utils.ts
+++ b/src/lib/index.ts
@@ -2,6 +2,13 @@ import path from 'path';
import mime from 'mime-types';
import fs from 'fs';
+export {
+ getCoverageIcon,
+ writeExportOutput,
+ displayFieldCoverage,
+ getDbConnectionErrorHint,
+} from './export';
+
/**
* Gets the current date and time in ISO format without milliseconds
* @returns A string in the format YYYY-MM-DDTHH:mm:ss
@@ -18,7 +25,7 @@ export const getDateTimeStamp = () => {
* @returns The absolute file path
*/
export const createImportFilePath = (file: string) => {
- return path.join(__dirname, '..', file);
+ return path.join(__dirname, '..', '..', file);
};
/**
@@ -192,49 +199,89 @@ export function getRetryDelay(
}
/**
- * Checks if a string is a valid Postgres connection string.
+ * Checks if a string is a valid database connection string.
+ *
+ * Accepts PostgreSQL, MySQL, and SQLite connection strings:
+ * - `postgresql://` or `postgres://` — validated as URL
+ * - `mysql://` or `mysql2://` — validated as URL
+ * - File paths (for SQLite) — `.sqlite`, `.db`, `.sqlite3` extensions, or `file:` prefix
*
- * Verifies the value starts with postgresql:// or postgres:// and is
- * parseable as a URL. Passwords with special characters (like @, #, %)
- * must be URL-encoded for the string to be valid.
+ * Passwords with special characters (like @, #, %) must be URL-encoded
+ * for URL-based connection strings.
*
* @param value - The string to check
- * @returns true if the value is a parseable Postgres URL
+ * @returns true if the value is a valid database connection string
*/
export const isValidConnectionString = (value: string): boolean => {
- if (!value.startsWith('postgresql://') && !value.startsWith('postgres://')) {
- return false;
+ const lower = value.toLowerCase();
+
+ // PostgreSQL
+ if (lower.startsWith('postgresql://') || lower.startsWith('postgres://')) {
+ try {
+ new URL(value);
+ return true;
+ } catch {
+ return false;
+ }
}
- try {
- new URL(value);
+
+ // MySQL
+ if (lower.startsWith('mysql://') || lower.startsWith('mysql2://')) {
+ try {
+ new URL(value);
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ // SQLite — file: prefix or file path with known extension
+ if (lower.startsWith('file:')) {
return true;
- } catch {
- return false;
}
+ if (
+ lower.endsWith('.sqlite') ||
+ lower.endsWith('.sqlite3') ||
+ lower.endsWith('.db')
+ ) {
+ return true;
+ }
+
+ return false;
};
/**
- * Resolves the database connection string from CLI args and environment variables.
+ * Resolves a database connection string from CLI args and environment variables.
*
- * Priority: --db-url flag > SUPABASE_DB_URL env var > interactive prompt
+ * Priority: --db-url flag > environment variable > interactive prompt
*
* Returns the resolved URL and an optional warning if an env var was present
* but had an invalid format.
*
* @param cliArgs - Raw CLI arguments (process.argv.slice(2))
* @param env - Environment variables to check
+ * @param options - Optional configuration for env var name and default output file
* @returns Object with resolved dbUrl (undefined if not found) and optional warning
*/
export function resolveConnectionString(
cliArgs: string[],
- env: Record
+ env: Record,
+ options: {
+ envVarName?: string;
+ defaultOutputFile?: string;
+ } = {}
): {
dbUrl: string | undefined;
outputFile: string;
warning: string | undefined;
} {
+ const {
+ envVarName = 'SUPABASE_DB_URL',
+ defaultOutputFile = 'supabase-export.json',
+ } = options;
+
let dbUrl: string | undefined;
- let outputFile = 'supabase-export.json';
+ let outputFile = defaultOutputFile;
let warning: string | undefined;
// Parse CLI flags
@@ -250,14 +297,42 @@ export function resolveConnectionString(
// Fall back to env vars if no --db-url flag, validating format
if (!dbUrl) {
- const envUrl = env.SUPABASE_DB_URL;
+ const envUrl = env[envVarName];
if (envUrl && isValidConnectionString(envUrl)) {
dbUrl = envUrl;
} else if (envUrl) {
- warning =
- 'Connection string from environment is not a valid Postgres URL — prompting instead.';
+ warning = `Connection string from ${envVarName} is not a valid database connection string — prompting instead.`;
}
}
return { dbUrl, outputFile, warning };
}
+
+/**
+ * Normalizes error messages by sorting field arrays to group similar errors
+ *
+ * Example: Converts both:
+ * - ["first_name" "last_name"] data doesn't match...
+ * - ["last_name" "first_name"] data doesn't match...
+ * into: ["first_name" "last_name"] data doesn't match...
+ *
+ * @param errorMessage - The original error message
+ * @returns The normalized error message with sorted field arrays
+ */
+export function normalizeErrorMessage(errorMessage: string): string {
+  // Rewrite every bracketed span: sort its field names and re-quote them.
+  return errorMessage.replace(/\[([^\]]+)\]/g, (_match, inner: string) => {
+    const names = inner
+      .split(/["'\s]+/)
+      .filter((name: string) => name.trim().length > 0);
+    const sorted = [...names].sort();
+    return `[${sorted.map((name: string) => `"${name}"`).join(' ')}]`;
+  });
+}
diff --git a/src/lib/settings.ts b/src/lib/settings.ts
new file mode 100644
index 0000000..05edfca
--- /dev/null
+++ b/src/lib/settings.ts
@@ -0,0 +1,43 @@
+import fs from 'fs';
+import path from 'path';
+import type { Settings } from '../types';
+
+const SETTINGS_FILE = '.settings';
+
+/**
+ * Reads previously saved migration settings from the .settings file.
+ *
+ * Used to pre-fill CLI defaults from earlier runs. Any read or parse
+ * failure (missing file, bad JSON) yields an empty settings object.
+ *
+ * @returns The saved settings object with key and file properties
+ */
+export const loadSettings = (): Settings => {
+  try {
+    const settingsPath = path.join(process.cwd(), SETTINGS_FILE);
+    if (!fs.existsSync(settingsPath)) {
+      return {};
+    }
+    return JSON.parse(fs.readFileSync(settingsPath, 'utf-8')) as Settings;
+  } catch {
+    // Corrupted or unreadable settings are treated as absent.
+    return {};
+  }
+};
+
+/**
+ * Persists migration settings to the .settings file in the current directory.
+ *
+ * Saves the current migration parameters (transformer key, file path) as
+ * pretty-printed JSON so future runs can offer them as defaults.
+ *
+ * @param settings - The settings object to save
+ */
+export const saveSettings = (settings: Settings): void => {
+  const settingsPath = path.join(process.cwd(), SETTINGS_FILE);
+  try {
+    fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
+  } catch {
+    // Best-effort persistence: a failed write must not break the migration.
+  }
+};
diff --git a/src/lib/supabase.ts b/src/lib/supabase.ts
new file mode 100644
index 0000000..9589f9e
--- /dev/null
+++ b/src/lib/supabase.ts
@@ -0,0 +1,163 @@
+import fs from 'fs';
+
+// Maps Supabase provider keys to human-readable labels
+export const OAUTH_PROVIDER_LABELS: Record<string, string> = {
+  google: 'Google',
+  apple: 'Apple',
+  github: 'GitHub',
+  facebook: 'Facebook',
+  twitter: 'Twitter (X)',
+  discord: 'Discord',
+  spotify: 'Spotify',
+  slack: 'Slack',
+  slack_oidc: 'Slack (OIDC)',
+  twitch: 'Twitch',
+  linkedin: 'LinkedIn',
+  linkedin_oidc: 'LinkedIn (OIDC)',
+  bitbucket: 'Bitbucket',
+  gitlab: 'GitLab',
+  azure: 'Microsoft (Azure)',
+  kakao: 'Kakao',
+  notion: 'Notion',
+  zoom: 'Zoom',
+  keycloak: 'Keycloak',
+  figma: 'Figma',
+  fly: 'Fly.io',
+  workos: 'WorkOS',
+  snapchat: 'Snapchat',
+};
+
+// Non-OAuth entries in the Supabase external config to ignore
+export const IGNORED_PROVIDERS = new Set(['email', 'phone', 'anonymous_users']);
+
+interface SupabaseAuthSettings {
+  // Maps provider key → enabled flag, per GET /auth/v1/settings
+  external?: Record<string, boolean>;
+}
+
+/**
+ * Fetches the Supabase project's auth settings to determine which OAuth providers are enabled.
+ *
+ * Calls GET {supabaseUrl}/auth/v1/settings with the API key. This endpoint returns
+ * the `external` config object with a boolean for each provider (google, apple, etc.).
+ *
+ * @param supabaseUrl - The Supabase project URL (e.g., https://xxx.supabase.co)
+ * @param apiKey - Any valid Supabase API key (anon or service role)
+ * @returns List of enabled OAuth provider keys, or null if the fetch failed
+ */
+export async function fetchSupabaseProviders(
+ supabaseUrl: string,
+ apiKey: string
+): Promise {
+ try {
+ const url = `${supabaseUrl.replace(/\/$/, '')}/auth/v1/settings`;
+ const res = await fetch(url, {
+ headers: { apikey: apiKey },
+ });
+
+ if (!res.ok) {
+ return null;
+ }
+
+ const settings = (await res.json()) as SupabaseAuthSettings;
+ if (!settings.external) {
+ return null;
+ }
+
+ return Object.entries(settings.external)
+ .filter(([key, enabled]) => enabled && !IGNORED_PROVIDERS.has(key))
+ .map(([key]) => key);
+ } catch {
+ return null;
+ }
+}
+
+/**
+ * Analyzes the raw export data to count users per auth provider.
+ *
+ * Reads raw_app_meta_data.providers from each user record in the JSON file.
+ * This runs on the raw (pre-transformation) data since the transformer
+ * doesn't map raw_app_meta_data.
+ *
+ * @param filePath - Path to the JSON export file
+ * @returns Map of provider name to user count (e.g., { email: 142, discord: 5 })
+ */
+export function analyzeUserProviders(filePath: string): Record {
+ try {
+ const raw = JSON.parse(fs.readFileSync(filePath, 'utf-8')) as Record<
+ string,
+ unknown
+ >[];
+ const counts: Record = {};
+
+ for (const user of raw) {
+ const appMeta = user.raw_app_meta_data as
+ | Record
+ | undefined;
+ if (!appMeta?.providers) continue;
+
+ const providers = appMeta.providers as string[];
+ for (const provider of providers) {
+ counts[provider] = (counts[provider] || 0) + 1;
+ }
+ }
+
+ return counts;
+ } catch {
+ return {};
+ }
+}
+
+/**
+ * Finds user IDs whose only providers are disabled social providers.
+ *
+ * Reads the raw export file and checks each user's raw_app_meta_data.providers.
+ * A user is excluded only if ALL of their providers are disabled social providers —
+ * users with at least one supported provider (email, phone, or an enabled social
+ * provider) are never excluded.
+ *
+ * Any read/parse failure returns an empty result so callers can proceed.
+ *
+ * @param filePath - Path to the JSON export file
+ * @param disabledProviders - List of provider names not enabled in Clerk (e.g., ['discord'])
+ * @returns Object with excluded user IDs and per-provider counts of exclusively-affected users
+ */
+export function findUsersWithDisabledProviders(
+  filePath: string,
+  disabledProviders: string[]
+): { excludedIds: Set<string>; exclusionsByProvider: Record<string, number> } {
+  if (disabledProviders.length === 0)
+    return { excludedIds: new Set<string>(), exclusionsByProvider: {} };
+
+  try {
+    const raw = JSON.parse(fs.readFileSync(filePath, 'utf-8')) as Record<
+      string,
+      unknown
+    >[];
+    const excludedIds = new Set<string>();
+    const exclusionsByProvider: Record<string, number> = {};
+    const disabledSet = new Set<string>(disabledProviders);
+
+    for (const user of raw) {
+      const appMeta = user.raw_app_meta_data as
+        | Record<string, unknown>
+        | undefined;
+      if (!appMeta?.providers) continue;
+
+      const providers = appMeta.providers as string[];
+      // Supported = email/phone/anonymous OR a social provider not disabled.
+      const hasSupportedProvider = providers.some(
+        (p) => IGNORED_PROVIDERS.has(p) || !disabledSet.has(p)
+      );
+
+      if (!hasSupportedProvider) {
+        excludedIds.add(user.id as string);
+        const disabledForUser = providers.filter((p) => disabledSet.has(p));
+        for (const provider of disabledForUser) {
+          exclusionsByProvider[provider] =
+            (exclusionsByProvider[provider] || 0) + 1;
+        }
+      }
+    }
+
+    return { excludedIds, exclusionsByProvider };
+  } catch {
+    return { excludedIds: new Set<string>(), exclusionsByProvider: {} };
+  }
+}
diff --git a/src/logger.ts b/src/logger.ts
index 81078c2..fe7fc88 100644
--- a/src/logger.ts
+++ b/src/logger.ts
@@ -4,6 +4,7 @@ import type {
DeleteLogEntry,
ErrorLog,
ErrorPayload,
+ ExportLogEntry,
ImportLogEntry,
ValidationErrorPayload,
} from './types';
@@ -93,7 +94,7 @@ function logErrorPayload(
* @param logFile - The log file name (e.g., 'migration' or 'user-deletion')
*/
function logEntry(
- entry: ImportLogEntry | DeleteLogEntry,
+ entry: ImportLogEntry | DeleteLogEntry | ExportLogEntry,
dateTime: string,
logFile: string
) {
@@ -154,3 +155,12 @@ export const deleteErrorLogger = (payload: ErrorPayload, dateTime: string) => {
export const deleteLogger = (entry: DeleteLogEntry, dateTime: string) => {
logEntry(entry, dateTime, 'user-deletion');
};
+
+/**
+ * Logs user export attempts
+ *
+ * Delegates to logEntry using the 'export' log file prefix.
+ *
+ * @param entry - The export log entry containing user ID and status
+ * @param dateTime - The timestamp for the log file name (format: YYYY-MM-DDTHH:mm:ss)
+ */
+export const exportLogger = (entry: ExportLogEntry, dateTime: string) => {
+  logEntry(entry, dateTime, 'export');
+};
diff --git a/src/migrate/cli.ts b/src/migrate/cli.ts
index a371859..940001a 100644
--- a/src/migrate/cli.ts
+++ b/src/migrate/cli.ts
@@ -12,25 +12,25 @@ import {
import {
checkIfFileExists,
createImportFilePath,
- getDateTimeStamp,
getFileType,
transformKeys as transformKeysFromFunctions,
tryCatch,
-} from '../utils';
+} from '../lib';
import {
- env,
hasClerkSecretKey,
requireValidEnv,
setClerkSecretKey,
} from '../envs-constants';
-import type {
- FieldAnalysis,
- FirebaseHashConfig,
- IdentifierCounts,
- Settings,
-} from '../types';
-import { userSchema } from './validator';
-import { validationLogger } from '../logger';
+import type { FieldAnalysis, FirebaseHashConfig, Settings } from '../types';
+import { loadSettings, saveSettings } from '../lib/settings';
+import { analyzeFields, validateUsers } from '../lib/analysis';
+import {
+ analyzeUserProviders,
+ fetchSupabaseProviders,
+ findUsersWithDisabledProviders,
+ OAUTH_PROVIDER_LABELS,
+} from '../lib/supabase';
+import { detectInstanceType, fetchClerkConfig } from '../lib/clerk';
/**
* Parsed command-line arguments for the migration tool
@@ -52,8 +52,6 @@ export type CLIArgs = {
firebaseMemCost?: number;
};
-const SETTINGS_FILE = '.settings';
-
const DEV_USER_LIMIT = 500;
/**
@@ -167,7 +165,7 @@ async function ensureClerkSecretKey(
);
// eslint-disable-next-line no-console
console.error(
- 'You can find your secret key in the Clerk Dashboard under API Keys.'
+ 'You can find your secret key in the Clerk Dashboard under API Keys: https://dashboard.clerk.com/~/api-keys'
);
return false;
}
@@ -176,7 +174,8 @@ async function ensureClerkSecretKey(
p.note(
`${color.yellow('CLERK_SECRET_KEY is not set.')}\n\n` +
`You can find your secret key in the Clerk Dashboard:\n` +
- `${color.cyan('Dashboard → API Keys → Secret keys')}\n\n` +
+ `${color.cyan('Dashboard → API Keys → Secret keys')}\n` +
+ `${color.dim('https://dashboard.clerk.com/~/api-keys')}\n\n` +
`Alternatively, create a ${color.bold('.env')} file with:\n` +
`${color.dim('CLERK_SECRET_KEY=sk_test_...')}`,
'Missing API Key'
@@ -571,65 +570,6 @@ export async function runNonInteractive(args: CLIArgs): Promise<{
};
}
-/**
- * Detects whether the Clerk instance is development or production based on the secret key
- *
- * @returns "dev" if the secret key starts with "sk_test_", otherwise "prod"
- */
-export const detectInstanceType = (): 'dev' | 'prod' => {
- const secretKey = env.CLERK_SECRET_KEY;
- if (secretKey.startsWith('sk_test_')) {
- return 'dev';
- }
- return 'prod';
-};
-
-// Fields to analyze for the import (non-identifier fields)
-const ANALYZED_FIELDS = [
- { key: 'firstName', label: 'First Name' },
- { key: 'lastName', label: 'Last Name' },
- { key: 'password', label: 'Password' },
- { key: 'totpSecret', label: 'TOTP Secret' },
-];
-
-/**
- * Loads saved settings from the .settings file in the current directory
- *
- * Reads previously saved migration parameters to use as defaults in the CLI.
- * Returns an empty object if the file doesn't exist or is corrupted.
- *
- * @returns The saved settings object with key and file properties
- */
-export const loadSettings = (): Settings => {
- try {
- const settingsPath = path.join(process.cwd(), SETTINGS_FILE);
- if (fs.existsSync(settingsPath)) {
- const content = fs.readFileSync(settingsPath, 'utf-8');
- return JSON.parse(content) as Settings;
- }
- } catch {
- // If settings file is corrupted or unreadable, return empty settings
- }
- return {};
-};
-
-/**
- * Saves migration settings to the .settings file in the current directory
- *
- * Persists the current migration parameters (transformer key, file path)
- * so they can be used as defaults in future runs. Fails silently if unable to write.
- *
- * @param settings - The settings object to save
- */
-export const saveSettings = (settings: Settings): void => {
- try {
- const settingsPath = path.join(process.cwd(), SETTINGS_FILE);
- fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
- } catch {
- // Silently fail if we can't write settings
- }
-};
-
/**
* Loads and transforms users from a file without validation
*
@@ -703,389 +643,6 @@ export const loadRawUsers = async (
return rawUsers.map((data) => transformUser(data));
};
-/**
- * Checks if a value exists and is not empty
- *
- * Returns false for undefined, null, empty strings, and empty arrays.
- * Returns true for all other values including 0, false, and non-empty objects.
- *
- * @param value - The value to check
- * @returns true if the value has meaningful content, false otherwise
- */
-export const hasValue = (value: unknown): boolean => {
- if (value === undefined || value === null || value === '') return false;
- if (Array.isArray(value)) return value.length > 0;
- return true;
-};
-
-/**
- * Analyzes user data to determine field presence and identifier coverage
- *
- * Examines all users to count:
- * - How many users have each field (firstName, lastName, password, totpSecret)
- * - Identifier coverage (verified/unverified emails and phones, usernames)
- * - Whether all users have at least one valid identifier
- *
- * Used to provide feedback about Dashboard configuration requirements.
- *
- * @param users - Array of user objects to analyze
- * @returns Field analysis object with counts and identifier statistics
- */
-export function analyzeFields(users: Record<string, unknown>[]): FieldAnalysis {
- const totalUsers = users.length;
-
- if (totalUsers === 0) {
- return {
- presentOnAll: [],
- presentOnSome: [],
- identifiers: {
- verifiedEmails: 0,
- unverifiedEmails: 0,
- verifiedPhones: 0,
- unverifiedPhones: 0,
- username: 0,
- hasAnyIdentifier: 0,
- },
- totalUsers: 0,
- fieldCounts: {},
- };
- }
-
- const fieldCounts: Record<string, number> = {};
- const identifiers: IdentifierCounts = {
- verifiedEmails: 0,
- unverifiedEmails: 0,
- verifiedPhones: 0,
- unverifiedPhones: 0,
- username: 0,
- hasAnyIdentifier: 0,
- };
-
- // Count how many users have each field
- for (const user of users) {
- // Count non-identifier fields
- for (const field of ANALYZED_FIELDS) {
- if (hasValue(user[field.key])) {
- fieldCounts[field.key] = (fieldCounts[field.key] || 0) + 1;
- }
- }
-
- // Count consolidated identifier fields
- const hasVerifiedEmail =
- hasValue(user.email) || hasValue(user.emailAddresses);
- const hasUnverifiedEmail = hasValue(user.unverifiedEmailAddresses);
- const hasVerifiedPhone =
- hasValue(user.phone) || hasValue(user.phoneNumbers);
- const hasUnverifiedPhone = hasValue(user.unverifiedPhoneNumbers);
- const hasUsername = hasValue(user.username);
-
- if (hasVerifiedEmail) identifiers.verifiedEmails++;
- if (hasUnverifiedEmail) identifiers.unverifiedEmails++;
- if (hasVerifiedPhone) identifiers.verifiedPhones++;
- if (hasUnverifiedPhone) identifiers.unverifiedPhones++;
- if (hasUsername) identifiers.username++;
-
- // Check if user has at least one valid identifier
- if (hasVerifiedEmail || hasVerifiedPhone || hasUsername) {
- identifiers.hasAnyIdentifier++;
- }
- }
-
- const presentOnAll: string[] = [];
- const presentOnSome: string[] = [];
-
- for (const field of ANALYZED_FIELDS) {
- const count = fieldCounts[field.key] || 0;
- if (count === totalUsers) {
- presentOnAll.push(field.label);
- } else if (count > 0) {
- presentOnSome.push(field.label);
- }
- }
-
- return { presentOnAll, presentOnSome, identifiers, totalUsers, fieldCounts };
-}
-
-/**
- * Validates users against the schema and logs validation errors.
- *
- * Runs before the readiness display so users can see the validation failure
- * count and review the log file before confirming the migration.
- *
- * Applies transformer default fields (e.g., Supabase passwordHasher: "bcrypt")
- * before validation to match the behavior of the full import pipeline.
- *
- * @param users - Array of transformed user objects from loadRawUsers()
- * @param transformerKey - Transformer key to look up default fields
- * @returns Object with validation failure count and log file path
- */
-export function validateUsers(
- users: Record<string, unknown>[],
- transformerKey: string
-): { validationFailed: number; logFile: string } {
- const dateTime = getDateTimeStamp();
- const logFile = `migration-${dateTime}.log`;
- let validationFailed = 0;
-
- // Look up transformer defaults (e.g., Supabase adds passwordHasher: "bcrypt")
- const transformer = transformers.find((obj) => obj.key === transformerKey);
- const defaultFields =
- transformer && 'defaults' in transformer ? transformer.defaults : null;
-
- for (let i = 0; i < users.length; i++) {
- const user = defaultFields ? { ...users[i], ...defaultFields } : users[i];
- const result = userSchema.safeParse(user);
-
- if (!result.success) {
- validationFailed++;
- const firstIssue = result.error.issues[0];
- validationLogger(
- {
- error: firstIssue.message,
- path: firstIssue.path as (string | number)[],
- userId: (user.userId as string) || `row-${i}`,
- row: i,
- },
- dateTime
- );
- }
- }
-
- return { validationFailed, logFile };
-}
-
-// Maps Supabase provider keys to human-readable labels
-const OAUTH_PROVIDER_LABELS: Record<string, string> = {
- google: 'Google',
- apple: 'Apple',
- github: 'GitHub',
- facebook: 'Facebook',
- twitter: 'Twitter (X)',
- discord: 'Discord',
- spotify: 'Spotify',
- slack: 'Slack',
- slack_oidc: 'Slack (OIDC)',
- twitch: 'Twitch',
- linkedin: 'LinkedIn',
- linkedin_oidc: 'LinkedIn (OIDC)',
- bitbucket: 'Bitbucket',
- gitlab: 'GitLab',
- azure: 'Microsoft (Azure)',
- kakao: 'Kakao',
- notion: 'Notion',
- zoom: 'Zoom',
- keycloak: 'Keycloak',
- figma: 'Figma',
- fly: 'Fly.io',
- workos: 'WorkOS',
- snapchat: 'Snapchat',
-};
-
-// Non-OAuth entries in the Supabase external config to ignore
-const IGNORED_PROVIDERS = new Set(['email', 'phone', 'anonymous_users']);
-
-interface SupabaseAuthSettings {
- external?: Record<string, boolean>;
-}
-
-/**
- * Fetches the Supabase project's auth settings to determine which OAuth providers are enabled.
- *
- * Calls GET {supabaseUrl}/auth/v1/settings with the API key. This endpoint returns
- * the `external` config object with a boolean for each provider (google, apple, etc.).
- *
- * @param supabaseUrl - The Supabase project URL (e.g., https://xxx.supabase.co)
- * @param apiKey - Any valid Supabase API key (anon or service role)
- * @returns List of enabled OAuth provider keys, or null if the fetch failed
- */
-export async function fetchSupabaseProviders(
- supabaseUrl: string,
- apiKey: string
-): Promise<string[] | null> {
- try {
- const url = `${supabaseUrl.replace(/\/$/, '')}/auth/v1/settings`;
- const res = await fetch(url, {
- headers: { apikey: apiKey },
- });
-
- if (!res.ok) {
- return null;
- }
-
- const settings = (await res.json()) as SupabaseAuthSettings;
- if (!settings.external) {
- return null;
- }
-
- return Object.entries(settings.external)
- .filter(([key, enabled]) => enabled && !IGNORED_PROVIDERS.has(key))
- .map(([key]) => key);
- } catch {
- return null;
- }
-}
-
-// --- Clerk Instance Configuration ---
-
-interface ClerkConfig {
- attributes: Partial<Record<string, { enabled: boolean; required: boolean }>>;
- social: Partial<Record<string, { enabled: boolean }>>;
-}
-
-/**
- * Decodes a Clerk publishable key to extract the frontend API hostname.
- *
- * Format: pk_test_ or pk_live_
- * The base64 payload decodes to a hostname ending with '$'.
- *
- * @param key - The Clerk publishable key
- * @returns The frontend API hostname, or null if decoding fails
- */
-function decodePublishableKey(key: string): string | null {
- if (!key.startsWith('pk_test_') && !key.startsWith('pk_live_')) {
- return null;
- }
- try {
- const base64Part = key.split('_')[2];
- const decoded = Buffer.from(base64Part, 'base64').toString();
- if (!decoded.endsWith('$') || !decoded.includes('.')) {
- return null;
- }
- return decoded.slice(0, -1);
- } catch {
- return null;
- }
-}
-
-/**
- * Fetches the Clerk instance configuration via the Frontend API.
- *
- * Decodes the publishable key to derive the FAPI hostname, then calls
- * GET /v1/environment to retrieve auth settings, social connections,
- * and user model configuration.
- *
- * @param publishableKey - The Clerk publishable key (pk_test_... or pk_live_...)
- * @returns Clerk configuration with attributes and social connections, or null on failure
- */
-export async function fetchClerkConfig(
- publishableKey: string
-): Promise<ClerkConfig | null> {
- const frontendApi = decodePublishableKey(publishableKey);
- if (!frontendApi) return null;
-
- try {
- const res = await fetch(`https://${frontendApi}/v1/environment`);
- if (!res.ok) return null;
-
- const data = (await res.json()) as {
- user_settings?: {
- attributes?: Record<string, { enabled: boolean; required: boolean }>;
- social?: Record<string, { enabled: boolean }>;
- };
- };
- const userSettings = data.user_settings;
- if (!userSettings) return null;
-
- return {
- attributes: userSettings.attributes || {},
- social: userSettings.social || {},
- };
- } catch {
- return null;
- }
-}
-
-/**
- * Analyzes the raw export data to count users per auth provider.
- *
- * Reads raw_app_meta_data.providers from each user record in the JSON file.
- * This runs on the raw (pre-transformation) data since the transformer
- * doesn't map raw_app_meta_data.
- *
- * @param filePath - Path to the JSON export file
- * @returns Map of provider name to user count (e.g., { email: 142, discord: 5 })
- */
-export function analyzeUserProviders(filePath: string): Record<string, number> {
- try {
- const raw = JSON.parse(fs.readFileSync(filePath, 'utf-8')) as Record<
- string,
- unknown
- >[];
- const counts: Record = {};
-
- for (const user of raw) {
- const appMeta = user.raw_app_meta_data as
- | Record<string, unknown>
- | undefined;
- if (!appMeta?.providers) continue;
-
- const providers = appMeta.providers as string[];
- for (const provider of providers) {
- counts[provider] = (counts[provider] || 0) + 1;
- }
- }
-
- return counts;
- } catch {
- return {};
- }
-}
-
-/**
- * Finds user IDs whose only providers are disabled social providers.
- *
- * Reads the raw export file and checks each user's raw_app_meta_data.providers.
- * A user is excluded only if ALL of their providers are disabled social providers —
- * users with at least one supported provider (email, phone, or an enabled social
- * provider) are never excluded.
- *
- * @param filePath - Path to the JSON export file
- * @param disabledProviders - List of provider names not enabled in Clerk (e.g., ['discord'])
- * @returns Object with excluded user IDs and per-provider counts of exclusively-affected users
- */
-export function findUsersWithDisabledProviders(
- filePath: string,
- disabledProviders: string[]
-): { excludedIds: Set<string>; exclusionsByProvider: Record<string, number> } {
- if (disabledProviders.length === 0)
- return { excludedIds: new Set(), exclusionsByProvider: {} };
-
- try {
- const raw = JSON.parse(fs.readFileSync(filePath, 'utf-8')) as Record<
- string,
- unknown
- >[];
- const excludedIds = new Set<string>();
- const exclusionsByProvider: Record<string, number> = {};
- const disabledSet = new Set(disabledProviders);
-
- for (const user of raw) {
- const appMeta = user.raw_app_meta_data as
- | Record<string, unknown>
- | undefined;
- if (!appMeta?.providers) continue;
-
- const providers = appMeta.providers as string[];
- const hasSupportedProvider = providers.some(
- (p) => IGNORED_PROVIDERS.has(p) || !disabledSet.has(p)
- );
-
- if (!hasSupportedProvider) {
- excludedIds.add(user.id as string);
- const disabledForUser = providers.filter((p) => disabledSet.has(p));
- for (const provider of disabledForUser) {
- exclusionsByProvider[provider] =
- (exclusionsByProvider[provider] || 0) + 1;
- }
- }
- }
-
- return { excludedIds, exclusionsByProvider };
- } catch {
- return { excludedIds: new Set(), exclusionsByProvider: {} };
- }
-}
-
// --- Cross-Reference Display ---
interface ReadinessItem {
@@ -1142,9 +699,11 @@ export function displayCrossReference(
} else if (configStatus.clerk === 'failed') {
message += ` ${color.yellow('⚠')} ${color.yellow('Could not fetch Clerk configuration')}\n`;
message += ` ${color.dim(' Verify your Clerk Dashboard settings match the report below')}\n`;
+ message += ` ${color.dim(' https://dashboard.clerk.com/~/api-keys')}\n`;
} else {
message += ` ${color.yellow('○')} ${color.dim('Add CLERK_PUBLISHABLE_KEY to .env and restart to enable automatic checking,')}\n`;
message += ` ${color.dim(' or verify your Clerk Dashboard settings match the report below')}\n`;
+ message += ` ${color.dim(' https://dashboard.clerk.com/~/api-keys')}\n`;
}
message += '\n';
@@ -1209,22 +768,27 @@ export function displayCrossReference(
if (isIdentifier && item.clerkRequired === true && !allUsers) {
const missing = total - item.userCount;
message += ` ${color.yellow('⚠')} ${item.label} — ${color.yellow('required in Clerk')} — ${color.dim(`${countStr} (${missing} will fail without ${item.label.toLowerCase()})`)}\n`;
+ message += ` ${color.dim(' https://dashboard.clerk.com/~/user-authentication')}\n`;
needsAttention.push(item);
} else {
message += ` ${color.green('✓')} ${item.label} — ${color.dim(`enabled in Clerk — ${countStr}`)}\n`;
}
} else if (item.clerkEnabled === false) {
message += ` ${color.red('✗')} ${item.label} — ${color.red('not enabled in Clerk')} — ${color.dim(countStr)}\n`;
+ message += ` ${color.dim(' https://dashboard.clerk.com/~/user-authentication')}\n`;
needsAttention.push(item);
} else if (isIdentifier && allUsers) {
// No Clerk config — all users have this identifier, safe to require
message += ` ${color.yellow('○')} ${item.label} — ${color.dim(`${countStr} — enable in Clerk Dashboard (can be required)`)}\n`;
+ message += ` ${color.dim(' https://dashboard.clerk.com/~/user-authentication')}\n`;
} else if (isIdentifier) {
// No Clerk config — not all users have this identifier, requiring would cause failures
message += ` ${color.yellow('○')} ${item.label} — ${color.dim(`${countStr} — enable in Clerk Dashboard (do not require)`)}\n`;
+ message += ` ${color.dim(' https://dashboard.clerk.com/~/user-authentication')}\n`;
} else {
// No Clerk config — non-identifier item
message += ` ${color.yellow('○')} ${item.label} — ${color.dim(`${countStr} — enable in Clerk Dashboard`)}\n`;
+ message += ` ${color.dim(' https://dashboard.clerk.com/~/user-authentication')}\n`;
}
}
diff --git a/src/migrate/functions.ts b/src/migrate/functions.ts
index 6ae365e..d26ac47 100644
--- a/src/migrate/functions.ts
+++ b/src/migrate/functions.ts
@@ -11,7 +11,7 @@ import {
getDateTimeStamp,
getFileType,
transformKeys,
-} from '../utils';
+} from '../lib';
// Re-export for backwards compatibility
export type { PreTransformResult } from '../types';
diff --git a/src/migrate/import-users.ts b/src/migrate/import-users.ts
index d296979..8b05d9f 100644
--- a/src/migrate/import-users.ts
+++ b/src/migrate/import-users.ts
@@ -4,7 +4,8 @@ import { env, MAX_RETRIES, RETRY_DELAY_MS } from '../envs-constants';
import * as p from '@clack/prompts';
import color from 'picocolors';
import { closeAllStreams, errorLogger, importLogger } from '../logger';
-import { getDateTimeStamp, getRetryDelay, tryCatch } from '../utils';
+import { getDateTimeStamp, getRetryDelay, tryCatch } from '../lib';
+import { normalizeErrorMessage } from '../lib';
import { userSchema } from './validator';
import type { ImportSummary, User } from '../types';
import pLimit from 'p-limit';
@@ -361,35 +362,6 @@ async function processUserToClerk(
);
}
-/**
- * Normalizes error messages by sorting field arrays to group similar errors
- *
- * Example: Converts both:
- * - ["first_name" "last_name"] data doesn't match...
- * - ["last_name" "first_name"] data doesn't match...
- * into: ["first_name" "last_name"] data doesn't match...
- *
- * @param errorMessage - The original error message
- * @returns The normalized error message with sorted field arrays
- */
-export function normalizeErrorMessage(errorMessage: string): string {
- // Match array-like patterns in error messages: ["field1" "field2"]
- const arrayPattern = /\[([^\]]+)\]/g;
-
- return errorMessage.replace(arrayPattern, (_match, fields: string) => {
- // Split by spaces and quotes, filter out empty strings
- const fieldNames = fields
- .split(/["'\s]+/)
- .filter((f: string) => f.trim().length > 0);
-
- // Sort field names alphabetically
- fieldNames.sort();
-
- // Reconstruct the array notation
- return `[${fieldNames.map((f: string) => `"${f}"`).join(' ')}]`;
- });
-}
-
/**
* Displays a formatted summary of the import operation
*
diff --git a/src/transformers/auth0.ts b/src/transformers/auth0.ts
index bfa1ad8..75442a5 100644
--- a/src/transformers/auth0.ts
+++ b/src/transformers/auth0.ts
@@ -1,3 +1,5 @@
+import type { TransformerRegistryEntry } from '../types';
+
/**
* Transformer for migrating users from Auth0
*
@@ -80,6 +82,6 @@ const auth0Transformer = {
// Auth0 uses bcrypt with $2a$ or $2b$ prefix and 10 salt rounds
passwordHasher: 'bcrypt' as const,
},
-};
+} satisfies TransformerRegistryEntry;
export default auth0Transformer;
diff --git a/src/transformers/authjs.ts b/src/transformers/authjs.ts
index 9868704..9d7fee5 100644
--- a/src/transformers/authjs.ts
+++ b/src/transformers/authjs.ts
@@ -1,3 +1,5 @@
+import type { TransformerRegistryEntry } from '../types';
+
/**
* Transformer for migrating users from Auth.js (formerly Next-Auth)
*
@@ -62,6 +64,6 @@ const authjsTransformer = {
delete user.name;
}
},
-};
+} satisfies TransformerRegistryEntry;
export default authjsTransformer;
diff --git a/src/transformers/betterauth.ts b/src/transformers/betterauth.ts
new file mode 100644
index 0000000..f583967
--- /dev/null
+++ b/src/transformers/betterauth.ts
@@ -0,0 +1,106 @@
+import type { TransformerRegistryEntry } from '../types';
+
+/**
+ * Transformer for migrating users from Better Auth
+ *
+ * Maps Better Auth user export format to Clerk's import format.
+ * Works with the Better Auth export (bun export:betterauth).
+ *
+ * Handles Better Auth-specific features:
+ * - Email verification status routing (email_verified)
+ * - Phone verification status routing (phone_number_verified)
+ * - Bcrypt password hashes from the credential account
+ * - Name splitting (name → firstName + lastName)
+ * - Admin plugin banned flag
+ * - Cleanup of plugin-specific fields that aren't in the Clerk schema
+ *
+ * @property {string} key - Transformer identifier used in CLI
+ * @property {string} label - Display name shown in CLI prompts
+ * @property {string} description - Detailed description shown in CLI
+ * @property {Object} transformer - Field mapping configuration
+ * @property {Function} postTransform - Custom transformation logic
+ * @property {Object} defaults - Default values applied to all users (passwordHasher: bcrypt)
+ */
+const betterAuthTransformer = {
+ key: 'betterauth',
+ label: 'Better Auth',
+ description:
+ 'Works with the Better Auth export (bun export:betterauth). Supports bcrypt passwords and optional plugin fields.',
+ transformer: {
+ user_id: 'userId',
+ email: 'email',
+ email_verified: 'emailVerified',
+ name: 'name',
+ password_hash: 'password',
+ username: 'username',
+ phone_number: 'phone',
+ phone_number_verified: 'phoneVerified',
+ created_at: 'createdAt',
+ updated_at: 'updatedAt',
+ },
+ postTransform: (user: Record<string, unknown>) => {
+ // Handle email verification
+ const emailVerified = user.emailVerified as boolean | undefined;
+ const email = user.email as string | undefined;
+
+ if (email) {
+ if (emailVerified === true) {
+ user.email = email;
+ } else {
+ user.unverifiedEmailAddresses = email;
+ delete user.email;
+ }
+ }
+
+ // Handle phone verification
+ const phoneVerified = user.phoneVerified as boolean | undefined;
+ const phone = user.phone as string | undefined;
+
+ if (phone) {
+ if (phoneVerified === true) {
+ user.phone = phone;
+ } else {
+ user.unverifiedPhoneNumbers = phone;
+ delete user.phone;
+ }
+ }
+
+ // Clean up verification fields
+ delete user.emailVerified;
+ delete user.phoneVerified;
+
+ // Split name into firstName and lastName
+ const name = user.name as string | null | undefined;
+ if (name && typeof name === 'string') {
+ const trimmedName = name.trim();
+ const nameParts = trimmedName.split(/\s+/);
+
+ if (nameParts.length > 1) {
+ user.firstName = nameParts[0];
+ user.lastName = nameParts.slice(1).join(' ');
+ }
+
+ delete user.name;
+ }
+
+ // Map banned field if present
+ const banned = user.banned as boolean | undefined;
+ if (banned === true) {
+ user.banned = true;
+ } else {
+ delete user.banned;
+ }
+
+ // Clean up plugin-specific fields that aren't in the Clerk schema
+ delete user.display_username;
+ delete user.role;
+ delete user.ban_reason;
+ delete user.ban_expires;
+ delete user.two_factor_enabled;
+ },
+ defaults: {
+ passwordHasher: 'bcrypt' as const,
+ },
+} satisfies TransformerRegistryEntry;
+
+export default betterAuthTransformer;
diff --git a/src/transformers/clerk.ts b/src/transformers/clerk.ts
index 1b8af9f..503e0e2 100644
--- a/src/transformers/clerk.ts
+++ b/src/transformers/clerk.ts
@@ -1,3 +1,5 @@
+import type { TransformerRegistryEntry } from '../types';
+
/**
* Transformer for migrating users from one Clerk instance to another
*
@@ -35,6 +37,6 @@ const clerkTransformer = {
unsafe_metadata: 'unsafeMetadata',
private_metadata: 'privateMetadata',
},
-};
+} satisfies TransformerRegistryEntry;
export default clerkTransformer;
diff --git a/src/transformers/firebase.ts b/src/transformers/firebase.ts
index 03159d7..5916d13 100644
--- a/src/transformers/firebase.ts
+++ b/src/transformers/firebase.ts
@@ -1,6 +1,11 @@
import fs from 'fs';
import path from 'path';
-import type { FirebaseHashConfig, PreTransformResult, User } from '../types';
+import type {
+ FirebaseHashConfig,
+ PreTransformResult,
+ TransformerRegistryEntry,
+ User,
+} from '../types';
// Re-export for backwards compatibility
export type { FirebaseHashConfig } from '../types';
@@ -217,6 +222,6 @@ const firebaseTransformer = {
defaults: {
passwordHasher: 'scrypt_firebase' as const,
},
-};
+} satisfies TransformerRegistryEntry;
export default firebaseTransformer;
diff --git a/src/transformers/index.ts b/src/transformers/index.ts
index f6499e7..069f4ed 100644
--- a/src/transformers/index.ts
+++ b/src/transformers/index.ts
@@ -1,13 +1 @@
-import clerkTransformer from './clerk';
-import auth0Transformer from './auth0';
-import authjsTransformer from './authjs';
-import firebaseTransformer from './firebase';
-import supabaseTransformer from './supabase';
-
-export const transformers = [
- clerkTransformer,
- auth0Transformer,
- authjsTransformer,
- firebaseTransformer,
- supabaseTransformer,
-];
+export { transformers } from './registry';
diff --git a/src/transformers/registry.ts b/src/transformers/registry.ts
new file mode 100644
index 0000000..bacc8eb
--- /dev/null
+++ b/src/transformers/registry.ts
@@ -0,0 +1,28 @@
+/**
+ * Transformer registry
+ *
+ * Central registration of all available platform transformers. The migration
+ * CLI reads from this array to build the interactive picker and resolve
+ * --transformer flags.
+ *
+ * To add a new transformer:
+ * 1. Create src/transformers/[platform].ts with a transformer config
+ * 2. Import and register it here
+ * 3. The CLI will automatically include it in the platform selection
+ */
+import type { TransformerRegistryEntry } from '../types';
+import clerkTransformer from './clerk';
+import auth0Transformer from './auth0';
+import authjsTransformer from './authjs';
+import betterAuthTransformer from './betterauth';
+import firebaseTransformer from './firebase';
+import supabaseTransformer from './supabase';
+
+export const transformers: TransformerRegistryEntry[] = [
+ clerkTransformer,
+ auth0Transformer,
+ authjsTransformer,
+ betterAuthTransformer,
+ firebaseTransformer,
+ supabaseTransformer,
+];
diff --git a/src/transformers/supabase.ts b/src/transformers/supabase.ts
index 2187679..757c327 100644
--- a/src/transformers/supabase.ts
+++ b/src/transformers/supabase.ts
@@ -1,3 +1,5 @@
+import type { TransformerRegistryEntry } from '../types';
+
/**
* Transformer for migrating users from Supabase Auth
*
@@ -114,6 +116,6 @@ const supabaseTransformer = {
defaults: {
passwordHasher: 'bcrypt' as const,
},
-};
+} satisfies TransformerRegistryEntry;
export default supabaseTransformer;
diff --git a/src/types.ts b/src/types.ts
index 27755bf..c49c7cd 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -123,6 +123,19 @@ export type ImportSummary = {
 errorBreakdown: Map<string, number>;
};
+/**
+ * Log entry for a user export attempt
+ *
+ * @property userId - The user ID that was exported
+ * @property status - Whether the export succeeded or failed
+ * @property error - Error message if export failed
+ */
+export type ExportLogEntry = {
+ userId: string;
+ status: 'success' | 'error';
+ error?: string;
+};
+
/**
* Log entry for a user deletion attempt
*
@@ -226,3 +239,52 @@ export type SettingsResult = {
file: string;
key?: string;
};
+
+/**
+ * Common fields returned by all export functions
+ *
+ * @property userCount - Number of users exported
+ * @property outputPath - Absolute path to the output file
+ * @property fieldCoverage - Map of field names to counts of users with that field
+ */
+export type BaseExportResult = {
+ userCount: number;
+ outputPath: string;
+ fieldCoverage: Record<string, number>;
+};
+
+/**
+ * Registry entry for the export dispatcher
+ *
+ * @property key - Unique key used for CLI --platform flag
+ * @property label - Display name shown in the interactive picker
+ * @property description - Short description shown below the label
+ * @property run - Function that executes the export
+ */
+export type ExportRegistryEntry = {
+ key: string;
+ label: string;
+ description: string;
+ run: () => Promise<BaseExportResult>;
+};
+
+/**
+ * Registry entry for a platform transformer
+ *
+ * @property key - Unique key used for CLI --transformer flag
+ * @property label - Display name shown in the interactive picker
+ * @property description - Detailed description shown in CLI
+ * @property transformer - Field mapping from source platform to Clerk schema
+ * @property defaults - Default values applied to all users from this platform
+ * @property preTransform - Pre-processing before field transformation
+ * @property postTransform - Custom logic applied after field mapping
+ */
+export type TransformerRegistryEntry = {
+ key: string;
+ label: string;
+ description: string;
+ transformer: Record<string, string>;
+ defaults?: Record<string, unknown>;
+ preTransform?: (filePath: string, fileType: string) => PreTransformResult;
+ postTransform?: (user: Record<string, unknown>) => void;
+};
diff --git a/tests/betterauth-transformer.test.ts b/tests/betterauth-transformer.test.ts
new file mode 100644
index 0000000..9e5f2be
--- /dev/null
+++ b/tests/betterauth-transformer.test.ts
@@ -0,0 +1,210 @@
+import { describe, expect, test } from 'vitest';
+
+// ============================================================================
+// Better Auth transformer tests
+// ============================================================================
+
+describe('betterAuthTransformer', () => {
+ async function getTransformer() {
+ const mod = await import('../src/transformers/betterauth');
+ return mod.default;
+ }
+
+ test('has correct key and label', async () => {
+ const transformer = await getTransformer();
+ expect(transformer.key).toBe('betterauth');
+ expect(transformer.label).toBe('Better Auth');
+ });
+
+ test('maps core fields correctly', async () => {
+ const transformer = await getTransformer();
+ expect(transformer.transformer.user_id).toBe('userId');
+ expect(transformer.transformer.email).toBe('email');
+ expect(transformer.transformer.password_hash).toBe('password');
+ expect(transformer.transformer.username).toBe('username');
+ expect(transformer.transformer.phone_number).toBe('phone');
+ expect(transformer.transformer.created_at).toBe('createdAt');
+ expect(transformer.transformer.updated_at).toBe('updatedAt');
+ });
+
+ test('sets passwordHasher: bcrypt as default', async () => {
+ const transformer = await getTransformer();
+ expect(transformer.defaults.passwordHasher).toBe('bcrypt');
+ });
+
+ test('routes verified email to email', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ email: 'test@example.com',
+ emailVerified: true,
+ };
+ transformer.postTransform(user);
+
+ expect(user.email).toBe('test@example.com');
+ expect(user.unverifiedEmailAddresses).toBeUndefined();
+ expect(user.emailVerified).toBeUndefined();
+ });
+
+ test('routes unverified email to unverifiedEmailAddresses', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ email: 'test@example.com',
+ emailVerified: false,
+ };
+ transformer.postTransform(user);
+
+ expect(user.email).toBeUndefined();
+ expect(user.unverifiedEmailAddresses).toBe('test@example.com');
+ expect(user.emailVerified).toBeUndefined();
+ });
+
+ test('routes verified phone to phone', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ phone: '+1234567890',
+ phoneVerified: true,
+ };
+ transformer.postTransform(user);
+
+ expect(user.phone).toBe('+1234567890');
+ expect(user.unverifiedPhoneNumbers).toBeUndefined();
+ expect(user.phoneVerified).toBeUndefined();
+ });
+
+ test('routes unverified phone to unverifiedPhoneNumbers', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ phone: '+1234567890',
+ phoneVerified: false,
+ };
+ transformer.postTransform(user);
+
+ expect(user.phone).toBeUndefined();
+ expect(user.unverifiedPhoneNumbers).toBe('+1234567890');
+ expect(user.phoneVerified).toBeUndefined();
+ });
+
+ test('splits name into firstName and lastName', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ name: 'John Doe',
+ };
+ transformer.postTransform(user);
+
+ expect(user.firstName).toBe('John');
+ expect(user.lastName).toBe('Doe');
+ expect(user.name).toBeUndefined();
+ });
+
+ test('handles multi-word last names', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ name: 'William James Miller',
+ };
+ transformer.postTransform(user);
+
+ expect(user.firstName).toBe('William');
+ expect(user.lastName).toBe('James Miller');
+ expect(user.name).toBeUndefined();
+ });
+
+ test('handles single-word names (no split)', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ name: 'Noah',
+ };
+ transformer.postTransform(user);
+
+ // Single word: firstName/lastName not set, name deleted
+ expect(user.firstName).toBeUndefined();
+ expect(user.lastName).toBeUndefined();
+ expect(user.name).toBeUndefined();
+ });
+
+ test('handles missing name gracefully', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ name: null,
+ };
+ transformer.postTransform(user);
+
+ expect(user.firstName).toBeUndefined();
+ expect(user.lastName).toBeUndefined();
+ });
+
+ test('maps banned field when true', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ banned: true,
+ };
+ transformer.postTransform(user);
+
+ expect(user.banned).toBe(true);
+ });
+
+ test('removes banned field when false', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ banned: false,
+ };
+ transformer.postTransform(user);
+
+ expect(user.banned).toBeUndefined();
+ });
+
+ test('cleans up intermediate plugin fields', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ display_username: 'TestUser',
+ role: 'admin',
+ ban_reason: 'Spam',
+ ban_expires: '2026-12-31',
+ two_factor_enabled: true,
+ emailVerified: true,
+ phoneVerified: false,
+ };
+ transformer.postTransform(user);
+
+ expect(user.display_username).toBeUndefined();
+ expect(user.role).toBeUndefined();
+ expect(user.ban_reason).toBeUndefined();
+ expect(user.ban_expires).toBeUndefined();
+ expect(user.two_factor_enabled).toBeUndefined();
+ expect(user.emailVerified).toBeUndefined();
+ expect(user.phoneVerified).toBeUndefined();
+ });
+
+ test('full transform with all fields', async () => {
+ const transformer = await getTransformer();
+ const user: Record<string, unknown> = {
+ email: 'test@example.com',
+ emailVerified: true,
+ name: 'Jane Smith',
+ phone: '+1234567890',
+ phoneVerified: true,
+ banned: true,
+ display_username: 'JaneS',
+ role: 'user',
+ ban_reason: null,
+ ban_expires: null,
+ two_factor_enabled: false,
+ };
+ transformer.postTransform(user);
+
+ expect(user.email).toBe('test@example.com');
+ expect(user.firstName).toBe('Jane');
+ expect(user.lastName).toBe('Smith');
+ expect(user.phone).toBe('+1234567890');
+ expect(user.banned).toBe(true);
+
+ // All intermediate fields cleaned up
+ expect(user.emailVerified).toBeUndefined();
+ expect(user.phoneVerified).toBeUndefined();
+ expect(user.name).toBeUndefined();
+ expect(user.display_username).toBeUndefined();
+ expect(user.role).toBeUndefined();
+ expect(user.ban_reason).toBeUndefined();
+ expect(user.ban_expires).toBeUndefined();
+ expect(user.two_factor_enabled).toBeUndefined();
+ });
+});
diff --git a/tests/delete.test.ts b/tests/delete.test.ts
index 5def214..eeae1d7 100644
--- a/tests/delete.test.ts
+++ b/tests/delete.test.ts
@@ -40,30 +40,35 @@ vi.mock('picocolors', () => ({
}));
// Mock utils
-vi.mock('../src/utils', () => ({
- getDateTimeStamp: vi.fn(() => '2024-01-01T12:00:00'),
- createImportFilePath: vi.fn((file: string) => file),
- getFileType: vi.fn(() => 'application/json'),
- tryCatch: async <T>(promise: Promise<T>) => {
- try {
- const data = await promise;
- return [data, null];
- } catch (error) {
- return [null, error];
- }
- },
- getRetryDelay: (
- retryCount: number,
- retryAfterSeconds: number | undefined,
- defaultDelayMs: number
- ) => {
- const delayMs = retryAfterSeconds
- ? retryAfterSeconds * 1000
- : defaultDelayMs;
- const delaySeconds = retryAfterSeconds || defaultDelayMs / 1000;
- return { delayMs, delaySeconds };
- },
-}));
+vi.mock('../src/lib', async (importOriginal) => {
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
+ const actual = (await importOriginal()) as Record<string, unknown>;
+ return {
+ ...actual,
+ getDateTimeStamp: vi.fn(() => '2024-01-01T12:00:00'),
+ createImportFilePath: vi.fn((file: string) => file),
+ getFileType: vi.fn(() => 'application/json'),
+ tryCatch: async <T>(promise: Promise<T>) => {
+ try {
+ const data = await promise;
+ return [data, null];
+ } catch (error) {
+ return [null, error];
+ }
+ },
+ getRetryDelay: (
+ retryCount: number,
+ retryAfterSeconds: number | undefined,
+ defaultDelayMs: number
+ ) => {
+ const delayMs = retryAfterSeconds
+ ? retryAfterSeconds * 1000
+ : defaultDelayMs;
+ const delaySeconds = retryAfterSeconds || defaultDelayMs / 1000;
+ return { delayMs, delaySeconds };
+ },
+ };
+});
// Mock env constants
vi.mock('../src/envs-constants', () => ({
@@ -108,10 +113,8 @@ vi.mock('../src/logger', () => ({
// Import after mocks are set up
import { deleteErrorLogger, deleteLogger } from '../src/logger';
import * as fs from 'fs';
-import {
- getSourceUserIdField,
- normalizeErrorMessage,
-} from '../src/delete/index';
+import { getSourceUserIdField } from '../src/delete/index';
+import { normalizeErrorMessage } from '../src/lib';
// Get reference to mocked functions - cast to mock type since vi.mocked is not available
const _mockDeleteErrorLogger = deleteErrorLogger as ReturnType<typeof vi.fn>;
diff --git a/tests/export-auth0.test.ts b/tests/export-auth0.test.ts
new file mode 100644
index 0000000..82de52b
--- /dev/null
+++ b/tests/export-auth0.test.ts
@@ -0,0 +1,304 @@
+import { beforeEach, describe, expect, test, vi } from 'vitest';
+
+// Use vi.hoisted so variables are available when vi.mock factories run
+const { mockGetAll, mockWriteFileSync, mockMkdirSync, mockExportLogger } =
+ vi.hoisted(() => ({
+ mockGetAll: vi.fn(),
+ mockWriteFileSync: vi.fn(),
+ mockMkdirSync: vi.fn(),
+ mockExportLogger: vi.fn(),
+ }));
+
+// Mock auth0 ManagementClient
+vi.mock('auth0', () => ({
+ ManagementClient: class MockManagementClient {
+ users = {
+ getAll: mockGetAll,
+ };
+ },
+}));
+
+// Mock fs to avoid writing files during tests
+vi.mock('fs', async () => {
+ const actual = await vi.importActual('fs');
+ return {
+ ...actual,
+ default: {
+ ...(actual as Record<string, unknown>),
+ writeFileSync: mockWriteFileSync,
+ mkdirSync: mockMkdirSync,
+ appendFileSync: vi.fn(),
+ },
+ };
+});
+
+// Mock logger
+vi.mock('../src/logger', () => ({
+ exportLogger: mockExportLogger,
+ closeAllStreams: vi.fn(),
+}));
+
+// ============================================================================
+// mapAuth0UserToExport tests
+// ============================================================================
+
+describe('mapAuth0UserToExport', () => {
+ async function getMapFn() {
+ const mod = await import('../src/export/auth0');
+ return mod.mapAuth0UserToExport;
+ }
+
+ test('maps core fields correctly', async () => {
+ const mapAuth0UserToExport = await getMapFn();
+ const result = mapAuth0UserToExport({
+ user_id: 'auth0|abc123',
+ email: 'test@example.com',
+ email_verified: true,
+ given_name: 'John',
+ family_name: 'Doe',
+ created_at: '2025-01-01T00:00:00.000Z',
+ });
+
+ expect(result.user_id).toBe('auth0|abc123');
+ expect(result.email).toBe('test@example.com');
+ expect(result.email_verified).toBe(true);
+ expect(result.given_name).toBe('John');
+ expect(result.family_name).toBe('Doe');
+ expect(result.created_at).toBe('2025-01-01T00:00:00.000Z');
+ });
+
+ test('includes optional fields when present', async () => {
+ const mapAuth0UserToExport = await getMapFn();
+ const result = mapAuth0UserToExport({
+ user_id: 'auth0|abc123',
+ email: 'test@example.com',
+ username: 'testuser',
+ phone_number: '+1234567890',
+ phone_verified: true,
+ user_metadata: { theme: 'dark' },
+ app_metadata: { role: 'admin' },
+ });
+
+ expect(result.username).toBe('testuser');
+ expect(result.phone_number).toBe('+1234567890');
+ expect(result.phone_verified).toBe(true);
+ expect(result.user_metadata).toEqual({ theme: 'dark' });
+ expect(result.app_metadata).toEqual({ role: 'admin' });
+ });
+
+ test('omits null/empty fields', async () => {
+ const mapAuth0UserToExport = await getMapFn();
+ const result = mapAuth0UserToExport({
+ user_id: 'auth0|abc123',
+ email: null,
+ username: null,
+ phone_number: null,
+ user_metadata: {},
+ app_metadata: {},
+ });
+
+ expect(result.email).toBeUndefined();
+ expect(result.username).toBeUndefined();
+ expect(result.phone_number).toBeUndefined();
+ expect(result.user_metadata).toBeUndefined();
+ expect(result.app_metadata).toBeUndefined();
+ });
+});
+
+// ============================================================================
+// exportAuth0Users tests
+// ============================================================================
+
+describe('exportAuth0Users', () => {
+ beforeEach(() => {
+ vi.clearAllMocks();
+ });
+
+ async function getExportFn() {
+ const mod = await import('../src/export/auth0');
+ return mod.exportAuth0Users;
+ }
+
+ function makeAuth0User(overrides: Record<string, unknown> = {}) {
+ return {
+ user_id: 'auth0|test123',
+ email: 'test@example.com',
+ email_verified: true,
+ username: null,
+ given_name: 'Test',
+ family_name: 'User',
+ phone_number: null,
+ phone_verified: false,
+ user_metadata: {},
+ app_metadata: {},
+ created_at: '2025-01-01T00:00:00.000Z',
+ ...overrides,
+ };
+ }
+
+ test('paginates when results fill a page', async () => {
+ const exportAuth0Users = await getExportFn();
+
+ // First call returns 100 users (full page), second returns less
+ const page1 = Array.from({ length: 100 }, (_, i) =>
+ makeAuth0User({ user_id: `auth0|user_${i}` })
+ );
+ const page2 = [makeAuth0User({ user_id: 'auth0|user_100' })];
+
+ mockGetAll
+ .mockResolvedValueOnce({ data: { users: page1 } })
+ .mockResolvedValueOnce({ data: { users: page2 } });
+
+ const result = await exportAuth0Users(
+ 'test.auth0.com',
+ 'client-id',
+ 'client-secret',
+ 'test-output.json'
+ );
+
+ expect(mockGetAll).toHaveBeenCalledTimes(2);
+ expect(mockGetAll).toHaveBeenCalledWith({
+ page: 0,
+ per_page: 100,
+ include_totals: true,
+ });
+ expect(mockGetAll).toHaveBeenCalledWith({
+ page: 1,
+ per_page: 100,
+ include_totals: true,
+ });
+ expect(result.userCount).toBe(101);
+ });
+
+ test('does not paginate when results are less than page size', async () => {
+ const exportAuth0Users = await getExportFn();
+
+ const users = [
+ makeAuth0User({ user_id: 'auth0|user_1' }),
+ makeAuth0User({ user_id: 'auth0|user_2' }),
+ ];
+
+ mockGetAll.mockResolvedValueOnce({ data: { users } });
+
+ const result = await exportAuth0Users(
+ 'test.auth0.com',
+ 'client-id',
+ 'client-secret',
+ 'test-output.json'
+ );
+
+ expect(mockGetAll).toHaveBeenCalledTimes(1);
+ expect(result.userCount).toBe(2);
+ });
+
+ test('returns accurate field coverage counts', async () => {
+ const exportAuth0Users = await getExportFn();
+
+ const users = [
+ makeAuth0User({
+ user_id: 'auth0|user_1',
+ email: 'a@test.com',
+ username: 'user1',
+ given_name: 'Test',
+ family_name: 'User',
+ phone_number: '+1234567890',
+ }),
+ makeAuth0User({
+ user_id: 'auth0|user_2',
+ email: 'b@test.com',
+ username: null,
+ given_name: null,
+ family_name: null,
+ phone_number: null,
+ }),
+ ];
+
+ mockGetAll.mockResolvedValueOnce({ data: { users } });
+
+ const result = await exportAuth0Users(
+ 'test.auth0.com',
+ 'client-id',
+ 'client-secret',
+ 'test-output.json'
+ );
+
+ expect(result.fieldCoverage.email).toBe(2);
+ expect(result.fieldCoverage.username).toBe(1);
+ expect(result.fieldCoverage.firstName).toBe(1);
+ expect(result.fieldCoverage.lastName).toBe(1);
+ expect(result.fieldCoverage.phone).toBe(1);
+ expect(result.fieldCoverage.password).toBe(0); // Never available from API
+ });
+
+ test('writes JSON output to exports directory', async () => {
+ const exportAuth0Users = await getExportFn();
+
+ mockGetAll.mockResolvedValueOnce({
+ data: { users: [makeAuth0User()] },
+ });
+
+ await exportAuth0Users(
+ 'test.auth0.com',
+ 'client-id',
+ 'client-secret',
+ 'test-output.json'
+ );
+
+ expect(mockMkdirSync).toHaveBeenCalledWith(
+ expect.stringContaining('exports'),
+ { recursive: true }
+ );
+ expect(mockWriteFileSync).toHaveBeenCalledWith(
+ expect.stringContaining('test-output.json'),
+ expect.any(String)
+ );
+
+ const writtenJson = mockWriteFileSync.mock.calls[0][1] as string;
+ const parsed = JSON.parse(writtenJson);
+ expect(Array.isArray(parsed)).toBe(true);
+ expect(parsed[0].user_id).toBe('auth0|test123');
+ });
+
+ test('logs each user via exportLogger', async () => {
+ const exportAuth0Users = await getExportFn();
+
+ const users = [
+ makeAuth0User({ user_id: 'auth0|user_1' }),
+ makeAuth0User({ user_id: 'auth0|user_2' }),
+ ];
+
+ mockGetAll.mockResolvedValueOnce({ data: { users } });
+
+ await exportAuth0Users(
+ 'test.auth0.com',
+ 'client-id',
+ 'client-secret',
+ 'test-output.json'
+ );
+
+ expect(mockExportLogger).toHaveBeenCalledTimes(2);
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'auth0|user_1', status: 'success' },
+ expect.any(String)
+ );
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'auth0|user_2', status: 'success' },
+ expect.any(String)
+ );
+ });
+
+ test('handles API errors', async () => {
+ const exportAuth0Users = await getExportFn();
+
+ mockGetAll.mockRejectedValue(new Error('Unauthorized'));
+
+ await expect(
+ exportAuth0Users(
+ 'test.auth0.com',
+ 'client-id',
+ 'client-secret',
+ 'test-output.json'
+ )
+ ).rejects.toThrow('Unauthorized');
+ });
+});
diff --git a/tests/export-authjs.test.ts b/tests/export-authjs.test.ts
new file mode 100644
index 0000000..670c63b
--- /dev/null
+++ b/tests/export-authjs.test.ts
@@ -0,0 +1,319 @@
+import { beforeEach, describe, expect, test, vi } from 'vitest';
+
+// Use vi.hoisted so variables are available when vi.mock factories run
+const {
+ mockCreateDbClient,
+ mockWriteFileSync,
+ mockMkdirSync,
+ mockExportLogger,
+} = vi.hoisted(() => ({
+ mockCreateDbClient: vi.fn(),
+ mockWriteFileSync: vi.fn(),
+ mockMkdirSync: vi.fn(),
+ mockExportLogger: vi.fn(),
+}));
+
+// Mock the db module
+vi.mock('../src/lib/db', () => ({
+ createDbClient: mockCreateDbClient,
+}));
+
+// Mock fs to avoid writing files during tests
+vi.mock('fs', async () => {
+ const actual = await vi.importActual('fs');
+ return {
+ ...actual,
+ default: {
+ ...(actual as Record<string, unknown>),
+ writeFileSync: mockWriteFileSync,
+ mkdirSync: mockMkdirSync,
+ appendFileSync: vi.fn(),
+ },
+ };
+});
+
+// Mock logger
+vi.mock('../src/logger', () => ({
+ exportLogger: mockExportLogger,
+ closeAllStreams: vi.fn(),
+}));
+
+// ============================================================================
+// exportAuthJSUsers tests
+// ============================================================================
+
+describe('exportAuthJSUsers', () => {
+ const dbUrl = 'postgresql://user:password@localhost:5432/authjs_db';
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+ });
+
+ async function getExportFn() {
+ const mod = await import('../src/export/authjs');
+ return mod.exportAuthJSUsers;
+ }
+
+ function makeMockClient(
+ dbType: 'postgres' | 'mysql' | 'sqlite' = 'postgres'
+ ) {
+ const mockQuery = vi.fn();
+ const mockEnd = vi.fn();
+ const client = {
+ dbType,
+ query: mockQuery,
+ end: mockEnd,
+ };
+ mockCreateDbClient.mockResolvedValue(client);
+ return { client, mockQuery, mockEnd };
+ }
+
+ test('exports users with correct field coverage (PostgreSQL)', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('postgres');
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ {
+ id: 'user_1',
+ name: 'Test User',
+ email: 'test@example.com',
+ email_verified: '2025-01-01T00:00:00.000Z',
+ },
+ {
+ id: 'user_2',
+ name: null,
+ email: 'other@example.com',
+ email_verified: null,
+ },
+ ],
+ });
+
+ const exportAuthJSUsers = await getExportFn();
+ const result = await exportAuthJSUsers(dbUrl, 'test-output.json');
+
+ expect(result.userCount).toBe(2);
+ expect(result.fieldCoverage.email).toBe(2);
+ expect(result.fieldCoverage.emailVerified).toBe(1);
+ expect(result.fieldCoverage.name).toBe(1);
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('uses correct quoting for PostgreSQL', async () => {
+ const { mockQuery } = makeMockClient('postgres');
+ mockQuery.mockResolvedValueOnce({ rows: [] });
+
+ const exportAuthJSUsers = await getExportFn();
+ await exportAuthJSUsers(dbUrl, 'test-output.json');
+
+ const query = mockQuery.mock.calls[0][0] as string;
+ expect(query).toContain('"User"');
+ expect(query).toContain('"emailVerified"');
+ });
+
+ test('uses correct quoting for MySQL', async () => {
+ const { mockQuery } = makeMockClient('mysql');
+ mockQuery.mockResolvedValueOnce({ rows: [] });
+
+ const exportAuthJSUsers = await getExportFn();
+ await exportAuthJSUsers(
+ 'mysql://user:pass@localhost/db',
+ 'test-output.json'
+ );
+
+ const query = mockQuery.mock.calls[0][0] as string;
+ expect(query).toContain('`User`');
+ expect(query).toContain('`emailVerified`');
+ });
+
+ test('uses correct quoting for SQLite', async () => {
+ const { mockQuery } = makeMockClient('sqlite');
+ mockQuery.mockResolvedValueOnce({ rows: [] });
+
+ const exportAuthJSUsers = await getExportFn();
+ await exportAuthJSUsers('/path/to/db.sqlite', 'test-output.json');
+
+ const query = mockQuery.mock.calls[0][0] as string;
+ expect(query).toContain('"User"');
+ expect(query).toContain('"emailVerified"');
+ });
+
+ test('retries with lowercase table names on table not found', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('postgres');
+ // First query fails with "does not exist"
+ mockQuery.mockRejectedValueOnce(
+ new Error('relation "User" does not exist')
+ );
+ // Retry with lowercase succeeds
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ {
+ id: 'user_1',
+ name: 'Test',
+ email: 'test@example.com',
+ email_verified: null,
+ },
+ ],
+ });
+
+ const exportAuthJSUsers = await getExportFn();
+ const result = await exportAuthJSUsers(dbUrl, 'test-output.json');
+
+ expect(result.userCount).toBe(1);
+ expect(mockQuery).toHaveBeenCalledTimes(2);
+ // Second query should use lowercase
+ const secondQuery = mockQuery.mock.calls[1][0] as string;
+ expect(secondQuery).toContain('"user"');
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('throws helpful error when both PascalCase and lowercase fail', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('postgres');
+ mockQuery.mockRejectedValueOnce(
+ new Error('relation "User" does not exist')
+ );
+ mockQuery.mockRejectedValueOnce(
+ new Error('relation "user" does not exist')
+ );
+
+ const exportAuthJSUsers = await getExportFn();
+ await expect(exportAuthJSUsers(dbUrl, 'test-output.json')).rejects.toThrow(
+ 'Could not find AuthJS tables'
+ );
+
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('shows connection error hints for authjs platform', async () => {
+ mockCreateDbClient.mockRejectedValue(
+ new Error('getaddrinfo ENOTFOUND localhost')
+ );
+
+ const exportAuthJSUsers = await getExportFn();
+ await expect(exportAuthJSUsers(dbUrl, 'test-output.json')).rejects.toThrow(
+ 'hostname could not be resolved'
+ );
+ });
+
+ test('writes JSON output to exports directory', async () => {
+ const { mockQuery } = makeMockClient('postgres');
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ {
+ id: 'user_1',
+ name: 'Test',
+ email: 'test@example.com',
+ email_verified: null,
+ },
+ ],
+ });
+
+ const exportAuthJSUsers = await getExportFn();
+ await exportAuthJSUsers(dbUrl, 'test-output.json');
+
+ expect(mockMkdirSync).toHaveBeenCalledWith(
+ expect.stringContaining('exports'),
+ { recursive: true }
+ );
+ expect(mockWriteFileSync).toHaveBeenCalledWith(
+ expect.stringContaining('test-output.json'),
+ expect.any(String)
+ );
+
+ const writtenJson = mockWriteFileSync.mock.calls[0][1] as string;
+ const parsed = JSON.parse(writtenJson);
+ expect(Array.isArray(parsed)).toBe(true);
+ expect(parsed[0].id).toBe('user_1');
+ });
+
+ test('logs each user via exportLogger', async () => {
+ const { mockQuery } = makeMockClient('postgres');
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ {
+ id: 'user_1',
+ name: 'A',
+ email: 'a@example.com',
+ email_verified: null,
+ },
+ {
+ id: 'user_2',
+ name: 'B',
+ email: 'b@example.com',
+ email_verified: '2025-01-01',
+ },
+ ],
+ });
+
+ const exportAuthJSUsers = await getExportFn();
+ await exportAuthJSUsers(dbUrl, 'test-output.json');
+
+ expect(mockExportLogger).toHaveBeenCalledTimes(2);
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_1', status: 'success' },
+ expect.any(String)
+ );
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_2', status: 'success' },
+ expect.any(String)
+ );
+ });
+});
+
+// ============================================================================
+// resolveConnectionString with AuthJS options
+// ============================================================================
+
+describe('resolveConnectionString with AuthJS options', () => {
+ async function getResolveFn() {
+ const mod = await import('../src/lib/index');
+ return mod.resolveConnectionString;
+ }
+
+ const options = {
+ envVarName: 'AUTHJS_DB_URL',
+ defaultOutputFile: 'authjs-export.json',
+ };
+
+ test('uses AUTHJS_DB_URL env var', async () => {
+ const resolveConnectionString = await getResolveFn();
+ const validUrl = 'postgresql://user:password@localhost:5432/authjs_db';
+ const result = resolveConnectionString(
+ [],
+ { AUTHJS_DB_URL: validUrl },
+ options
+ );
+
+ expect(result.dbUrl).toBe(validUrl);
+ expect(result.warning).toBeUndefined();
+ });
+
+ test('defaults outputFile to authjs-export.json', async () => {
+ const resolveConnectionString = await getResolveFn();
+ const result = resolveConnectionString([], {}, options);
+
+ expect(result.outputFile).toBe('authjs-export.json');
+ });
+
+ test('accepts MySQL URL from AUTHJS_DB_URL', async () => {
+ const resolveConnectionString = await getResolveFn();
+ const mysqlUrl = 'mysql://user:pass@localhost:3306/authjs';
+ const result = resolveConnectionString(
+ [],
+ { AUTHJS_DB_URL: mysqlUrl },
+ options
+ );
+
+ expect(result.dbUrl).toBe(mysqlUrl);
+ });
+
+ test('accepts SQLite path from AUTHJS_DB_URL', async () => {
+ const resolveConnectionString = await getResolveFn();
+ const sqlitePath = '/path/to/authjs.sqlite';
+ const result = resolveConnectionString(
+ [],
+ { AUTHJS_DB_URL: sqlitePath },
+ options
+ );
+
+ expect(result.dbUrl).toBe(sqlitePath);
+ });
+});
diff --git a/tests/export-betterauth.test.ts b/tests/export-betterauth.test.ts
new file mode 100644
index 0000000..a891e29
--- /dev/null
+++ b/tests/export-betterauth.test.ts
@@ -0,0 +1,599 @@
+import { beforeEach, describe, expect, test, vi } from 'vitest';
+import { resolveConnectionString } from '../src/lib';
+
+// Use vi.hoisted so variables are available when vi.mock factories run
+const {
+ mockCreateDbClient,
+ mockWriteFileSync,
+ mockMkdirSync,
+ mockExportLogger,
+} = vi.hoisted(() => ({
+ mockCreateDbClient: vi.fn(),
+ mockWriteFileSync: vi.fn(),
+ mockMkdirSync: vi.fn(),
+ mockExportLogger: vi.fn(),
+}));
+
+// Mock the db module
+vi.mock('../src/lib/db', () => ({
+ createDbClient: mockCreateDbClient,
+}));
+
+// Mock fs to avoid writing files during tests
+vi.mock('fs', async () => {
+ const actual = await vi.importActual('fs');
+ return {
+ ...actual,
+ default: {
+ ...(actual as Record<string, unknown>),
+ writeFileSync: mockWriteFileSync,
+ mkdirSync: mockMkdirSync,
+ appendFileSync: vi.fn(),
+ },
+ };
+});
+
+// Mock logger
+vi.mock('../src/logger', () => ({
+ exportLogger: mockExportLogger,
+ closeAllStreams: vi.fn(),
+}));
+
+// ============================================================================
+// resolveConnectionString with Better Auth options
+// ============================================================================
+
+describe('resolveConnectionString with Better Auth options', () => {
+ const validUrl = 'postgresql://user:password@localhost:5432/betterauth_db';
+
+ const options = {
+ envVarName: 'BETTER_AUTH_DB_URL',
+ defaultOutputFile: 'betterauth-export.json',
+ };
+
+ test('uses BETTER_AUTH_DB_URL env var', () => {
+ const result = resolveConnectionString(
+ [],
+ { BETTER_AUTH_DB_URL: validUrl },
+ options
+ );
+
+ expect(result.dbUrl).toBe(validUrl);
+ expect(result.warning).toBeUndefined();
+ });
+
+ test('defaults outputFile to betterauth-export.json', () => {
+ const result = resolveConnectionString([], {}, options);
+
+ expect(result.outputFile).toBe('betterauth-export.json');
+ });
+
+ test('--db-url flag takes priority over BETTER_AUTH_DB_URL', () => {
+ const envUrl = 'postgresql://other:pass@localhost:5432/other_db';
+ const result = resolveConnectionString(
+ ['--db-url', validUrl],
+ { BETTER_AUTH_DB_URL: envUrl },
+ options
+ );
+
+ expect(result.dbUrl).toBe(validUrl);
+ });
+
+ test('returns warning when BETTER_AUTH_DB_URL is invalid', () => {
+ const result = resolveConnectionString(
+ [],
+ { BETTER_AUTH_DB_URL: 'not-a-url' },
+ options
+ );
+
+ expect(result.dbUrl).toBeUndefined();
+ expect(result.warning).toContain('BETTER_AUTH_DB_URL');
+ expect(result.warning).toContain('not a valid database connection string');
+ });
+
+ test('does not use SUPABASE_DB_URL', () => {
+ const result = resolveConnectionString(
+ [],
+ { SUPABASE_DB_URL: validUrl },
+ options
+ );
+
+ expect(result.dbUrl).toBeUndefined();
+ });
+
+ test('accepts MySQL URL from BETTER_AUTH_DB_URL', () => {
+ const mysqlUrl = 'mysql://user:pass@localhost:3306/betterauth';
+ const result = resolveConnectionString(
+ [],
+ { BETTER_AUTH_DB_URL: mysqlUrl },
+ options
+ );
+
+ expect(result.dbUrl).toBe(mysqlUrl);
+ expect(result.warning).toBeUndefined();
+ });
+
+ test('accepts SQLite path from BETTER_AUTH_DB_URL', () => {
+ const sqlitePath = '/path/to/betterauth.sqlite';
+ const result = resolveConnectionString(
+ [],
+ { BETTER_AUTH_DB_URL: sqlitePath },
+ options
+ );
+
+ expect(result.dbUrl).toBe(sqlitePath);
+ expect(result.warning).toBeUndefined();
+ });
+});
+
+// ============================================================================
+// buildExportQuery tests
+// ============================================================================
+
+describe('buildExportQuery', () => {
+ async function getBuildFn() {
+ const mod = await import('../src/export/betterauth');
+ return mod.buildExportQuery;
+ }
+
+ test('includes only core columns when no plugins detected', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(new Set());
+
+ expect(query).toContain('"id"');
+ expect(query).toContain('"email"');
+ expect(query).toContain('"emailVerified"');
+ expect(query).toContain('"name"');
+ expect(query).toContain('"createdAt"');
+ expect(query).toContain('"password"');
+ expect(query).not.toContain('"username"');
+ expect(query).not.toContain('"phoneNumber"');
+ expect(query).not.toContain('"role"');
+ expect(query).not.toContain('"twoFactorEnabled"');
+ });
+
+ test('includes username plugin columns when detected', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(
+ new Set(['username', 'displayUsername'] as const)
+ );
+
+ expect(query).toContain('"username"');
+ expect(query).toContain('"displayUsername"');
+ });
+
+ test('includes phone plugin columns when detected', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(
+ new Set(['phoneNumber', 'phoneNumberVerified'] as const)
+ );
+
+ expect(query).toContain('"phoneNumber"');
+ expect(query).toContain('"phoneNumberVerified"');
+ });
+
+ test('includes admin plugin columns when detected', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(
+ new Set(['role', 'banned', 'banReason', 'banExpires'] as const)
+ );
+
+ expect(query).toContain('"role"');
+ expect(query).toContain('"banned"');
+ expect(query).toContain('"banReason"');
+ expect(query).toContain('"banExpires"');
+ });
+
+ test('includes two-factor plugin column when detected', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(new Set(['twoFactorEnabled'] as const));
+
+ expect(query).toContain('"twoFactorEnabled"');
+ });
+
+ test('joins account table on credential provider', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(new Set());
+
+ expect(query).toContain('account');
+ expect(query).toContain("'credential'");
+ });
+
+ test('orders by createdAt', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(new Set());
+
+ expect(query).toContain('"createdAt"');
+ expect(query).toContain('ASC');
+ });
+
+ test('uses backticks for MySQL', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(new Set(), 'mysql');
+
+ expect(query).toContain('`id`');
+ expect(query).toContain('`email`');
+ expect(query).toContain('`user`');
+ expect(query).toContain('`account`');
+ expect(query).not.toContain('"id"');
+ });
+
+ test('uses double quotes for PostgreSQL (default)', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(new Set());
+
+ expect(query).toContain('"id"');
+ expect(query).toContain('"email"');
+ expect(query).not.toContain('`id`');
+ });
+
+ test('uses double quotes for SQLite', async () => {
+ const buildExportQuery = await getBuildFn();
+ const query = buildExportQuery(new Set(), 'sqlite');
+
+ expect(query).toContain('"id"');
+ expect(query).toContain('"email"');
+ expect(query).not.toContain('`id`');
+ });
+});
+
+// ============================================================================
+// getDetectedPluginNames tests
+// ============================================================================
+
+describe('getDetectedPluginNames', () => {
+ async function getDetectFn() {
+ const mod = await import('../src/export/betterauth');
+ return mod.getDetectedPluginNames;
+ }
+
+ test('returns empty array when no plugin columns', async () => {
+ const getDetectedPluginNames = await getDetectFn();
+ expect(getDetectedPluginNames(new Set())).toEqual([]);
+ });
+
+ test('detects username plugin', async () => {
+ const getDetectedPluginNames = await getDetectFn();
+ const result = getDetectedPluginNames(new Set(['username'] as const));
+ expect(result).toContain('username');
+ });
+
+ test('detects phone number plugin', async () => {
+ const getDetectedPluginNames = await getDetectFn();
+ const result = getDetectedPluginNames(new Set(['phoneNumber'] as const));
+ expect(result).toContain('phone number');
+ });
+
+ test('detects admin plugin from any admin column', async () => {
+ const getDetectedPluginNames = await getDetectFn();
+ expect(getDetectedPluginNames(new Set(['banned'] as const))).toContain(
+ 'admin'
+ );
+ expect(getDetectedPluginNames(new Set(['role'] as const))).toContain(
+ 'admin'
+ );
+ });
+
+ test('detects two-factor plugin', async () => {
+ const getDetectedPluginNames = await getDetectFn();
+ const result = getDetectedPluginNames(
+ new Set(['twoFactorEnabled'] as const)
+ );
+ expect(result).toContain('two-factor');
+ });
+
+ test('detects multiple plugins', async () => {
+ const getDetectedPluginNames = await getDetectFn();
+ const result = getDetectedPluginNames(
+ new Set([
+ 'username',
+ 'phoneNumber',
+ 'banned',
+ 'twoFactorEnabled',
+ ] as const)
+ );
+ expect(result).toEqual(['username', 'phone number', 'admin', 'two-factor']);
+ });
+});
+
+// ============================================================================
+// exportBetterAuthUsers tests
+// ============================================================================
+
+describe('exportBetterAuthUsers', () => {
+ const dbUrl = 'postgresql://user:password@localhost:5432/betterauth_db';
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+ });
+
+ async function getExportFn() {
+ const mod = await import('../src/export/betterauth');
+ return mod.exportBetterAuthUsers;
+ }
+
+ function makeMockClient(
+ dbType: 'postgres' | 'mysql' | 'sqlite' = 'postgres'
+ ) {
+ const mockQuery = vi.fn();
+ const mockEnd = vi.fn();
+ const client = {
+ dbType,
+ query: mockQuery,
+ end: mockEnd,
+ };
+ mockCreateDbClient.mockResolvedValue(client);
+ return { client, mockQuery, mockEnd };
+ }
+
+ test('shows hostname hint for ENOTFOUND errors', async () => {
+ mockCreateDbClient.mockRejectedValue(
+ new Error('getaddrinfo ENOTFOUND localhost')
+ );
+ const exportBetterAuthUsers = await getExportFn();
+
+ await expect(exportBetterAuthUsers(dbUrl, 'out.json')).rejects.toThrow(
+ 'hostname could not be resolved'
+ );
+ });
+
+ test('shows unreachable hint for ETIMEDOUT errors', async () => {
+ mockCreateDbClient.mockRejectedValue(
+ new Error('connect ETIMEDOUT 127.0.0.1:5432')
+ );
+ const exportBetterAuthUsers = await getExportFn();
+
+ await expect(exportBetterAuthUsers(dbUrl, 'out.json')).rejects.toThrow(
+ 'database server is unreachable'
+ );
+ });
+
+ test('shows password hint for authentication errors', async () => {
+ mockCreateDbClient.mockRejectedValue(
+ new Error('password authentication failed for user "user"')
+ );
+ const exportBetterAuthUsers = await getExportFn();
+
+ await expect(exportBetterAuthUsers(dbUrl, 'out.json')).rejects.toThrow(
+ 'Check the password'
+ );
+ });
+
+ test('shows generic hint for unknown connection errors', async () => {
+ mockCreateDbClient.mockRejectedValue(new Error('some unexpected error'));
+ const exportBetterAuthUsers = await getExportFn();
+
+ await expect(exportBetterAuthUsers(dbUrl, 'out.json')).rejects.toThrow(
+ 'Verify your connection string'
+ );
+ });
+
+ test('shows table hint when user table does not exist', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('postgres');
+ // First query is detectPluginColumns
+ mockQuery.mockRejectedValue(new Error('relation "user" does not exist'));
+ const exportBetterAuthUsers = await getExportFn();
+
+ await expect(exportBetterAuthUsers(dbUrl, 'out.json')).rejects.toThrow(
+ 'Better Auth has been set up'
+ );
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('shows permission hint when access is denied', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('postgres');
+ mockQuery.mockRejectedValue(
+ new Error('permission denied for table "user"')
+ );
+ const exportBetterAuthUsers = await getExportFn();
+
+ await expect(exportBetterAuthUsers(dbUrl, 'out.json')).rejects.toThrow(
+ 'Better Auth has been set up'
+ );
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('exports users with correct field coverage', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('postgres');
+ // First query: detectPluginColumns
+ mockQuery.mockResolvedValueOnce({
+ rows: [{ column_name: 'username' }, { column_name: 'phoneNumber' }],
+ });
+ // Second query: actual export
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ {
+ user_id: 'user_1',
+ email: 'test@example.com',
+ email_verified: true,
+ name: 'Test User',
+ password_hash: '$2a$10$hash...',
+ username: 'testuser',
+ phone_number: null,
+ },
+ {
+ user_id: 'user_2',
+ email: 'other@example.com',
+ email_verified: false,
+ name: null,
+ password_hash: null,
+ username: null,
+ phone_number: '+1234567890',
+ },
+ ],
+ });
+ const exportBetterAuthUsers = await getExportFn();
+
+ const result = await exportBetterAuthUsers(dbUrl, 'test-output.json');
+
+ expect(result.userCount).toBe(2);
+ expect(result.detectedPlugins).toContain('username');
+ expect(result.detectedPlugins).toContain('phone number');
+ expect(result.fieldCoverage.email).toBe(2);
+ expect(result.fieldCoverage.emailVerified).toBe(1);
+ expect(result.fieldCoverage.name).toBe(1);
+ expect(result.fieldCoverage.password).toBe(1);
+ expect(result.fieldCoverage.username).toBe(1);
+ expect(result.fieldCoverage.phone).toBe(1);
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('writes JSON output to exports directory', async () => {
+ const { mockQuery } = makeMockClient('postgres');
+ mockQuery.mockResolvedValueOnce({ rows: [] }); // detectPluginColumns
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ {
+ user_id: 'user_1',
+ email: 'test@example.com',
+ email_verified: true,
+ name: 'Test',
+ password_hash: null,
+ },
+ ],
+ });
+ const exportBetterAuthUsers = await getExportFn();
+
+ await exportBetterAuthUsers(dbUrl, 'test-output.json');
+
+ expect(mockMkdirSync).toHaveBeenCalledWith(
+ expect.stringContaining('exports'),
+ { recursive: true }
+ );
+ expect(mockWriteFileSync).toHaveBeenCalledWith(
+ expect.stringContaining('test-output.json'),
+ expect.any(String)
+ );
+
+ const writtenJson = mockWriteFileSync.mock.calls[0][1] as string;
+ const parsed = JSON.parse(writtenJson);
+ expect(Array.isArray(parsed)).toBe(true);
+ expect(parsed[0].user_id).toBe('user_1');
+ });
+
+ test('logs each user via exportLogger', async () => {
+ const { mockQuery } = makeMockClient('postgres');
+ mockQuery.mockResolvedValueOnce({ rows: [] }); // detectPluginColumns
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ {
+ user_id: 'user_1',
+ email: 'a@example.com',
+ email_verified: true,
+ name: 'A',
+ password_hash: null,
+ },
+ {
+ user_id: 'user_2',
+ email: 'b@example.com',
+ email_verified: false,
+ name: 'B',
+ password_hash: null,
+ },
+ ],
+ });
+ const exportBetterAuthUsers = await getExportFn();
+
+ await exportBetterAuthUsers(dbUrl, 'test-output.json');
+
+ expect(mockExportLogger).toHaveBeenCalledTimes(2);
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_1', status: 'success' },
+ expect.any(String)
+ );
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_2', status: 'success' },
+ expect.any(String)
+ );
+ });
+
+ test('builds query with detected plugin columns', async () => {
+ const { mockQuery } = makeMockClient('postgres');
+ // Detect all plugin columns
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ { column_name: 'username' },
+ { column_name: 'displayUsername' },
+ { column_name: 'banned' },
+ { column_name: 'role' },
+ { column_name: 'twoFactorEnabled' },
+ ],
+ });
+ mockQuery.mockResolvedValueOnce({ rows: [] });
+ const exportBetterAuthUsers = await getExportFn();
+
+ const result = await exportBetterAuthUsers(dbUrl, 'test-output.json');
+
+ expect(result.detectedPlugins).toContain('username');
+ expect(result.detectedPlugins).toContain('admin');
+ expect(result.detectedPlugins).toContain('two-factor');
+
+ // Verify the second query (export) was called with a query containing plugin columns
+ const exportQuery = mockQuery.mock.calls[1][0] as string;
+ expect(exportQuery).toContain('"username"');
+ expect(exportQuery).toContain('"displayUsername"');
+ expect(exportQuery).toContain('"banned"');
+ expect(exportQuery).toContain('"role"');
+ expect(exportQuery).toContain('"twoFactorEnabled"');
+ });
+
+ test('re-throws unrecognized query errors', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('postgres');
+ mockQuery.mockResolvedValueOnce({ rows: [] }); // detectPluginColumns
+ mockQuery.mockRejectedValueOnce(new Error('syntax error in SQL'));
+ const exportBetterAuthUsers = await getExportFn();
+
+ await expect(exportBetterAuthUsers(dbUrl, 'out.json')).rejects.toThrow(
+ 'syntax error in SQL'
+ );
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('uses MySQL quoting when client is MySQL', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('mysql');
+ mockQuery.mockResolvedValueOnce({ rows: [] }); // detectPluginColumns
+ mockQuery.mockResolvedValueOnce({ rows: [] }); // export query
+ const exportBetterAuthUsers = await getExportFn();
+
+ await exportBetterAuthUsers(
+ 'mysql://user:pass@localhost:3306/db',
+ 'test-output.json'
+ );
+
+ // Verify the export query uses backticks
+ const exportQuery = mockQuery.mock.calls[1][0] as string;
+ expect(exportQuery).toContain('`id`');
+ expect(exportQuery).toContain('`user`');
+ expect(exportQuery).toContain('`account`');
+ expect(mockEnd).toHaveBeenCalled();
+ });
+
+ test('uses SQLite PRAGMA for plugin detection', async () => {
+ const { mockQuery, mockEnd } = makeMockClient('sqlite');
+ // PRAGMA table_info returns column names
+ mockQuery.mockResolvedValueOnce({
+ rows: [
+ { name: 'id' },
+ { name: 'email' },
+ { name: 'emailVerified' },
+ { name: 'name' },
+ { name: 'createdAt' },
+ { name: 'updatedAt' },
+ { name: 'username' },
+ ],
+ });
+ mockQuery.mockResolvedValueOnce({ rows: [] }); // export query
+ const exportBetterAuthUsers = await getExportFn();
+
+ const result = await exportBetterAuthUsers(
+ '/path/to/db.sqlite',
+ 'test-output.json'
+ );
+
+ expect(result.detectedPlugins).toContain('username');
+ // Verify the first query was PRAGMA
+ const detectQuery = mockQuery.mock.calls[0][0] as string;
+ expect(detectQuery).toContain('PRAGMA table_info');
+ expect(mockEnd).toHaveBeenCalled();
+ });
+});
diff --git a/tests/export-clerk.test.ts b/tests/export-clerk.test.ts
new file mode 100644
index 0000000..f5f10bd
--- /dev/null
+++ b/tests/export-clerk.test.ts
@@ -0,0 +1,451 @@
+import { beforeEach, describe, expect, test, vi } from 'vitest';
+
+// Mock @clerk/backend
+const mockGetUserList = vi.fn();
+
+vi.mock('@clerk/backend', () => ({
+ createClerkClient: () => ({
+ users: {
+ getUserList: mockGetUserList,
+ },
+ }),
+}));
+
+// Mock envs-constants
+vi.mock('../src/envs-constants', () => ({
+ env: {
+ CLERK_SECRET_KEY: 'sk_test_xxx',
+ RATE_LIMIT: 10,
+ CONCURRENCY_LIMIT: 1,
+ },
+}));
+
+// Mock fs to avoid writing files during tests
+const mockWriteFileSync = vi.fn();
+const mockMkdirSync = vi.fn();
+
+vi.mock('fs', async () => {
+ const actual = await vi.importActual('fs');
+ return {
+ ...actual,
+ default: {
+ ...(actual as Record<string, unknown>),
+ writeFileSync: mockWriteFileSync,
+ mkdirSync: mockMkdirSync,
+ appendFileSync: vi.fn(),
+ },
+ };
+});
+
+// Mock logger
+const mockExportLogger = vi.fn();
+
+vi.mock('../src/logger', () => ({
+ exportLogger: mockExportLogger,
+ closeAllStreams: vi.fn(),
+}));
+
+// ============================================================================
+// mapUserToExport tests
+// ============================================================================
+
+describe('mapUserToExport', () => {
+ async function getMapFn() {
+ const mod = await import('../src/export/clerk');
+ return mod.mapUserToExport;
+ }
+
+ function makeUser(overrides: Record<string, unknown> = {}) {
+ return {
+ id: 'user_test123',
+ emailAddresses: [],
+ phoneNumbers: [],
+ primaryEmailAddress: null,
+ primaryPhoneAddress: null,
+ primaryPhoneNumber: null,
+ username: null,
+ firstName: null,
+ lastName: null,
+ publicMetadata: {},
+ privateMetadata: {},
+ unsafeMetadata: {},
+ banned: false,
+ createOrganizationEnabled: true,
+ createOrganizationsLimit: null,
+ deleteSelfEnabled: true,
+ passwordEnabled: false,
+ totpEnabled: false,
+ backupCodeEnabled: false,
+ createdAt: 1700000000000,
+ legalAcceptedAt: null,
+ ...overrides,
+ };
+ }
+
+ test('maps primary email correctly', async () => {
+ const mapUserToExport = await getMapFn();
+ const user = makeUser({
+ primaryEmailAddress: {
+ emailAddress: 'primary@example.com',
+ },
+ emailAddresses: [
+ {
+ emailAddress: 'primary@example.com',
+ verification: { status: 'verified' },
+ },
+ ],
+ });
+
+ const result = mapUserToExport(user as never);
+ expect(result.email).toBe('primary@example.com');
+ expect(result.emailAddresses).toBeUndefined();
+ });
+
+ test('separates verified and unverified emails', async () => {
+ const mapUserToExport = await getMapFn();
+ const user = makeUser({
+ primaryEmailAddress: {
+ emailAddress: 'primary@example.com',
+ },
+ emailAddresses: [
+ {
+ emailAddress: 'primary@example.com',
+ verification: { status: 'verified' },
+ },
+ {
+ emailAddress: 'extra@example.com',
+ verification: { status: 'verified' },
+ },
+ {
+ emailAddress: 'unverified@example.com',
+ verification: { status: 'unverified' },
+ },
+ ],
+ });
+
+ const result = mapUserToExport(user as never);
+ expect(result.email).toBe('primary@example.com');
+ expect(result.emailAddresses).toEqual(['extra@example.com']);
+ expect(result.unverifiedEmailAddresses).toEqual(['unverified@example.com']);
+ });
+
+ test('separates verified and unverified phones', async () => {
+ const mapUserToExport = await getMapFn();
+ const user = makeUser({
+ primaryPhoneNumber: {
+ phoneNumber: '+1234567890',
+ },
+ phoneNumbers: [
+ {
+ phoneNumber: '+1234567890',
+ verification: { status: 'verified' },
+ },
+ {
+ phoneNumber: '+0987654321',
+ verification: { status: 'unverified' },
+ },
+ ],
+ });
+
+ const result = mapUserToExport(user as never);
+ expect(result.phone).toBe('+1234567890');
+ expect(result.phoneNumbers).toBeUndefined();
+ expect(result.unverifiedPhoneNumbers).toEqual(['+0987654321']);
+ });
+
+ test('converts timestamps from unix ms to ISO strings', async () => {
+ const mapUserToExport = await getMapFn();
+ const user = makeUser({
+ createdAt: 1700000000000,
+ legalAcceptedAt: 1700000000000,
+ });
+
+ const result = mapUserToExport(user as never);
+ expect(result.createdAt).toBe(new Date(1700000000000).toISOString());
+ expect(result.legalAcceptedAt).toBe(new Date(1700000000000).toISOString());
+ });
+
+ test('omits null/empty fields', async () => {
+ const mapUserToExport = await getMapFn();
+ const user = makeUser({
+ username: null,
+ firstName: null,
+ lastName: null,
+ legalAcceptedAt: null,
+ });
+
+ const result = mapUserToExport(user as never);
+ expect(result.username).toBeUndefined();
+ expect(result.firstName).toBeUndefined();
+ expect(result.lastName).toBeUndefined();
+ expect(result.legalAcceptedAt).toBeUndefined();
+ });
+
+ test('includes metadata only when non-empty', async () => {
+ const mapUserToExport = await getMapFn();
+
+ // Empty metadata should be omitted
+ const userEmpty = makeUser({
+ publicMetadata: {},
+ privateMetadata: {},
+ unsafeMetadata: {},
+ });
+ const emptyResult = mapUserToExport(userEmpty as never);
+ expect(emptyResult.publicMetadata).toBeUndefined();
+ expect(emptyResult.privateMetadata).toBeUndefined();
+ expect(emptyResult.unsafeMetadata).toBeUndefined();
+
+ // Non-empty metadata should be included
+ const userWithMeta = makeUser({
+ publicMetadata: { role: 'admin' },
+ privateMetadata: { stripe_id: 'cus_123' },
+ unsafeMetadata: { theme: 'dark' },
+ });
+ const metaResult = mapUserToExport(userWithMeta as never);
+ expect(metaResult.publicMetadata).toEqual({ role: 'admin' });
+ expect(metaResult.privateMetadata).toEqual({ stripe_id: 'cus_123' });
+ expect(metaResult.unsafeMetadata).toEqual({ theme: 'dark' });
+ });
+
+ test('includes simple fields when present', async () => {
+ const mapUserToExport = await getMapFn();
+ const user = makeUser({
+ username: 'jdoe',
+ firstName: 'John',
+ lastName: 'Doe',
+ });
+
+ const result = mapUserToExport(user as never);
+ expect(result.username).toBe('jdoe');
+ expect(result.firstName).toBe('John');
+ expect(result.lastName).toBe('Doe');
+ });
+
+ test('includes banned flag only when true', async () => {
+ const mapUserToExport = await getMapFn();
+
+ const notBanned = makeUser({ banned: false });
+ expect(mapUserToExport(notBanned as never).banned).toBeUndefined();
+
+ const banned = makeUser({ banned: true });
+ expect(mapUserToExport(banned as never).banned).toBe(true);
+ });
+
+ test('includes createOrganizationsLimit only when not null', async () => {
+ const mapUserToExport = await getMapFn();
+
+ const noLimit = makeUser({ createOrganizationsLimit: null });
+ expect(
+ mapUserToExport(noLimit as never).createOrganizationsLimit
+ ).toBeUndefined();
+
+ const withLimit = makeUser({ createOrganizationsLimit: 5 });
+ expect(mapUserToExport(withLimit as never).createOrganizationsLimit).toBe(
+ 5
+ );
+ });
+
+ test('handles verified emails with no primary set', async () => {
+ const mapUserToExport = await getMapFn();
+ const user = makeUser({
+ primaryEmailAddress: null,
+ emailAddresses: [
+ {
+ emailAddress: 'a@example.com',
+ verification: { status: 'verified' },
+ },
+ {
+ emailAddress: 'b@example.com',
+ verification: { status: 'verified' },
+ },
+ ],
+ });
+
+ const result = mapUserToExport(user as never);
+ expect(result.email).toBe('a@example.com');
+ expect(result.emailAddresses).toEqual(['b@example.com']);
+ });
+});
+
+// ============================================================================
+// exportClerkUsers tests
+// ============================================================================
+
+describe('exportClerkUsers', () => {
+ beforeEach(() => {
+ vi.clearAllMocks();
+ });
+
+ async function getExportFn() {
+ const mod = await import('../src/export/clerk');
+ return mod.exportClerkUsers;
+ }
+
+ function makeUser(overrides: Record<string, unknown> = {}) {
+ return {
+ id: 'user_test123',
+ emailAddresses: [
+ {
+ emailAddress: 'test@example.com',
+ verification: { status: 'verified' },
+ },
+ ],
+ phoneNumbers: [],
+ primaryEmailAddress: {
+ emailAddress: 'test@example.com',
+ },
+ primaryPhoneNumber: null,
+ username: null,
+ firstName: 'Test',
+ lastName: null,
+ publicMetadata: {},
+ privateMetadata: {},
+ unsafeMetadata: {},
+ banned: false,
+ createOrganizationEnabled: true,
+ createOrganizationsLimit: null,
+ deleteSelfEnabled: true,
+ passwordEnabled: true,
+ totpEnabled: false,
+ backupCodeEnabled: false,
+ createdAt: 1700000000000,
+ legalAcceptedAt: null,
+ ...overrides,
+ };
+ }
+
+ test('paginates when results equal LIMIT', async () => {
+ const exportClerkUsers = await getExportFn();
+
+ // First call returns 500 users (full page), second returns less
+ const page1 = Array.from({ length: 500 }, (_, i) =>
+ makeUser({ id: `user_${i}` })
+ );
+ const page2 = [makeUser({ id: 'user_500' })];
+
+ mockGetUserList
+ .mockResolvedValueOnce({ data: page1 })
+ .mockResolvedValueOnce({ data: page2 });
+
+ const result = await exportClerkUsers('test-output.json');
+
+ expect(mockGetUserList).toHaveBeenCalledTimes(2);
+ expect(mockGetUserList).toHaveBeenCalledWith({
+ offset: 0,
+ limit: 500,
+ });
+ expect(mockGetUserList).toHaveBeenCalledWith({
+ offset: 500,
+ limit: 500,
+ });
+ expect(result.userCount).toBe(501);
+ });
+
+ test('does not paginate when results are less than LIMIT', async () => {
+ const exportClerkUsers = await getExportFn();
+
+ const users = [makeUser(), makeUser({ id: 'user_2' })];
+ mockGetUserList.mockResolvedValueOnce({ data: users });
+
+ const result = await exportClerkUsers('test-output.json');
+
+ expect(mockGetUserList).toHaveBeenCalledTimes(1);
+ expect(result.userCount).toBe(2);
+ });
+
+ test('writes correct JSON output', async () => {
+ const exportClerkUsers = await getExportFn();
+
+ mockGetUserList.mockResolvedValueOnce({
+ data: [makeUser()],
+ });
+
+ await exportClerkUsers('test-output.json');
+
+ expect(mockMkdirSync).toHaveBeenCalledWith(
+ expect.stringContaining('exports'),
+ { recursive: true }
+ );
+ expect(mockWriteFileSync).toHaveBeenCalledWith(
+ expect.stringContaining('test-output.json'),
+ expect.any(String)
+ );
+
+ // Verify the written JSON is valid
+ const writtenJson = mockWriteFileSync.mock.calls[0][1] as string;
+ const parsed = JSON.parse(writtenJson);
+ expect(Array.isArray(parsed)).toBe(true);
+ expect(parsed[0].userId).toBe('user_test123');
+ expect(parsed[0].email).toBe('test@example.com');
+ });
+
+ test('returns accurate field coverage counts', async () => {
+ const exportClerkUsers = await getExportFn();
+
+ const users = [
+ makeUser({
+ id: 'user_1',
+ username: 'john',
+ firstName: 'John',
+ lastName: 'Doe',
+ passwordEnabled: true,
+ }),
+ makeUser({
+ id: 'user_2',
+ username: null,
+ firstName: 'Jane',
+ lastName: null,
+ passwordEnabled: false,
+ phoneNumbers: [
+ {
+ phoneNumber: '+1234567890',
+ verification: { status: 'verified' },
+ },
+ ],
+ primaryPhoneNumber: { phoneNumber: '+1234567890' },
+ }),
+ ];
+
+ mockGetUserList.mockResolvedValueOnce({ data: users });
+
+ const result = await exportClerkUsers('test-output.json');
+
+ expect(result.fieldCoverage.email).toBe(2);
+ expect(result.fieldCoverage.username).toBe(1);
+ expect(result.fieldCoverage.firstName).toBe(2);
+ expect(result.fieldCoverage.lastName).toBe(1);
+ expect(result.fieldCoverage.phone).toBe(1);
+ expect(result.fieldCoverage.password).toBe(1);
+ });
+
+ test('logs each user via exportLogger', async () => {
+ const exportClerkUsers = await getExportFn();
+
+ const users = [makeUser({ id: 'user_1' }), makeUser({ id: 'user_2' })];
+
+ mockGetUserList.mockResolvedValueOnce({ data: users });
+
+ await exportClerkUsers('test-output.json');
+
+ expect(mockExportLogger).toHaveBeenCalledTimes(2);
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_1', status: 'success' },
+ expect.any(String)
+ );
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_2', status: 'success' },
+ expect.any(String)
+ );
+ });
+
+ test('returns output path inside exports directory', async () => {
+ const exportClerkUsers = await getExportFn();
+
+ mockGetUserList.mockResolvedValueOnce({ data: [makeUser()] });
+
+ const result = await exportClerkUsers('clerk-export.json');
+
+ expect(result.outputPath).toContain('exports');
+ expect(result.outputPath).toContain('clerk-export.json');
+ });
+});
diff --git a/tests/export-firebase.test.ts b/tests/export-firebase.test.ts
new file mode 100644
index 0000000..0e9b107
--- /dev/null
+++ b/tests/export-firebase.test.ts
@@ -0,0 +1,314 @@
+import { beforeEach, describe, expect, test, vi } from 'vitest';
+
+// Use vi.hoisted so variables are available when vi.mock factories run
+const {
+ mockListUsers,
+ mockInitializeApp,
+ mockDeleteApp,
+ mockReadFileSync,
+ mockExistsSync,
+ mockWriteFileSync,
+ mockMkdirSync,
+ mockExportLogger,
+} = vi.hoisted(() => ({
+ mockListUsers: vi.fn(),
+ mockInitializeApp: vi.fn(),
+ mockDeleteApp: vi.fn(),
+ mockReadFileSync: vi.fn(),
+ mockExistsSync: vi.fn(),
+ mockWriteFileSync: vi.fn(),
+ mockMkdirSync: vi.fn(),
+ mockExportLogger: vi.fn(),
+}));
+
+// Mock firebase-admin
+vi.mock('firebase-admin', () => {
+ const mockApp = { delete: mockDeleteApp };
+ mockInitializeApp.mockReturnValue(mockApp);
+
+ return {
+ initializeApp: mockInitializeApp,
+ credential: {
+ cert: vi.fn((sa) => sa),
+ },
+ auth: () => ({
+ listUsers: mockListUsers,
+ }),
+ app: () => mockApp,
+ };
+});
+
+// Mock fs
+vi.mock('fs', async () => {
+ const actual = await vi.importActual('fs');
+ return {
+ ...actual,
+ default: {
+ ...(actual as Record<string, unknown>),
+ readFileSync: mockReadFileSync,
+ existsSync: mockExistsSync,
+ writeFileSync: mockWriteFileSync,
+ mkdirSync: mockMkdirSync,
+ appendFileSync: vi.fn(),
+ },
+ };
+});
+
+// Mock logger
+vi.mock('../src/logger', () => ({
+ exportLogger: mockExportLogger,
+ closeAllStreams: vi.fn(),
+}));
+
+// ============================================================================
+// mapFirebaseUserToExport tests
+// ============================================================================
+
+describe('mapFirebaseUserToExport', () => {
+ async function getMapFn() {
+ const mod = await import('../src/export/firebase');
+ return mod.mapFirebaseUserToExport;
+ }
+
+ function makeUserRecord(overrides: Record<string, unknown> = {}) {
+ return {
+ uid: 'firebase_uid_123',
+ email: 'test@example.com',
+ emailVerified: true,
+ passwordHash: 'base64hash',
+ passwordSalt: 'base64salt',
+ displayName: 'Test User',
+ phoneNumber: '+1234567890',
+ disabled: false,
+ metadata: {
+ creationTime: '2025-01-01T00:00:00.000Z',
+ lastSignInTime: '2025-06-01T00:00:00.000Z',
+ },
+ ...overrides,
+ };
+ }
+
+ test('maps core fields correctly', async () => {
+ const mapFirebaseUserToExport = await getMapFn();
+ const result = mapFirebaseUserToExport(makeUserRecord() as never);
+
+ expect(result.localId).toBe('firebase_uid_123');
+ expect(result.email).toBe('test@example.com');
+ expect(result.emailVerified).toBe(true);
+ expect(result.passwordHash).toBe('base64hash');
+ expect(result.passwordSalt).toBe('base64salt');
+ expect(result.displayName).toBe('Test User');
+ expect(result.phoneNumber).toBe('+1234567890');
+ });
+
+ test('converts metadata timestamps to milliseconds', async () => {
+ const mapFirebaseUserToExport = await getMapFn();
+ const result = mapFirebaseUserToExport(makeUserRecord() as never);
+
+ expect(result.createdAt).toBe(
+ new Date('2025-01-01T00:00:00.000Z').getTime()
+ );
+ expect(result.lastSignedInAt).toBe(
+ new Date('2025-06-01T00:00:00.000Z').getTime()
+ );
+ });
+
+ test('omits null/undefined fields', async () => {
+ const mapFirebaseUserToExport = await getMapFn();
+ const result = mapFirebaseUserToExport(
+ makeUserRecord({
+ email: undefined,
+ passwordHash: undefined,
+ passwordSalt: undefined,
+ displayName: undefined,
+ phoneNumber: undefined,
+ disabled: false,
+ }) as never
+ );
+
+ expect(result.localId).toBe('firebase_uid_123');
+ expect(result.email).toBeUndefined();
+ expect(result.passwordHash).toBeUndefined();
+ expect(result.passwordSalt).toBeUndefined();
+ expect(result.displayName).toBeUndefined();
+ expect(result.phoneNumber).toBeUndefined();
+ expect(result.disabled).toBeUndefined(); // false → omitted
+ });
+});
+
+// ============================================================================
+// exportFirebaseUsers tests
+// ============================================================================
+
+describe('exportFirebaseUsers', () => {
+ const serviceAccountPath = '/path/to/service-account.json';
+ const serviceAccountJson = JSON.stringify({
+ project_id: 'test-project',
+ client_email: 'test@test.iam.gserviceaccount.com',
+ private_key: 'test-key',
+ });
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+ mockReadFileSync.mockReturnValue(serviceAccountJson);
+ mockDeleteApp.mockResolvedValue(undefined);
+ });
+
+ async function getExportFn() {
+ const mod = await import('../src/export/firebase');
+ return mod.exportFirebaseUsers;
+ }
+
+ function makeUserRecord(overrides: Record<string, unknown> = {}) {
+ return {
+ uid: 'firebase_uid_123',
+ email: 'test@example.com',
+ emailVerified: true,
+ passwordHash: 'hash123',
+ passwordSalt: 'salt123',
+ displayName: 'Test User',
+ phoneNumber: '+1234567890',
+ disabled: false,
+ metadata: {
+ creationTime: '2025-01-01T00:00:00.000Z',
+ lastSignInTime: '2025-06-01T00:00:00.000Z',
+ },
+ ...overrides,
+ };
+ }
+
+ test('paginates with pageToken', async () => {
+ const exportFirebaseUsers = await getExportFn();
+
+ mockListUsers
+ .mockResolvedValueOnce({
+ users: [makeUserRecord({ uid: 'user_1' })],
+ pageToken: 'next-page-token',
+ })
+ .mockResolvedValueOnce({
+ users: [makeUserRecord({ uid: 'user_2' })],
+ pageToken: undefined,
+ });
+
+ const result = await exportFirebaseUsers(
+ serviceAccountPath,
+ 'test-output.json'
+ );
+
+ expect(mockListUsers).toHaveBeenCalledTimes(2);
+ expect(mockListUsers).toHaveBeenCalledWith(1000, undefined);
+ expect(mockListUsers).toHaveBeenCalledWith(1000, 'next-page-token');
+ expect(result.userCount).toBe(2);
+ });
+
+ test('returns accurate field coverage counts', async () => {
+ const exportFirebaseUsers = await getExportFn();
+
+ mockListUsers.mockResolvedValueOnce({
+ users: [
+ makeUserRecord({
+ uid: 'user_1',
+ email: 'a@test.com',
+ emailVerified: true,
+ passwordHash: 'hash',
+ displayName: 'User One',
+ phoneNumber: '+1234567890',
+ }),
+ makeUserRecord({
+ uid: 'user_2',
+ email: 'b@test.com',
+ emailVerified: false,
+ passwordHash: undefined,
+ displayName: undefined,
+ phoneNumber: undefined,
+ }),
+ ],
+ pageToken: undefined,
+ });
+
+ const result = await exportFirebaseUsers(
+ serviceAccountPath,
+ 'test-output.json'
+ );
+
+ expect(result.fieldCoverage.email).toBe(2);
+ expect(result.fieldCoverage.emailVerified).toBe(1);
+ expect(result.fieldCoverage.passwordHash).toBe(1);
+ expect(result.fieldCoverage.phone).toBe(1);
+ expect(result.fieldCoverage.displayName).toBe(1);
+ });
+
+ test('writes JSON output to exports directory', async () => {
+ const exportFirebaseUsers = await getExportFn();
+
+ mockListUsers.mockResolvedValueOnce({
+ users: [makeUserRecord()],
+ pageToken: undefined,
+ });
+
+ await exportFirebaseUsers(serviceAccountPath, 'test-output.json');
+
+ expect(mockMkdirSync).toHaveBeenCalledWith(
+ expect.stringContaining('exports'),
+ { recursive: true }
+ );
+ expect(mockWriteFileSync).toHaveBeenCalledWith(
+ expect.stringContaining('test-output.json'),
+ expect.any(String)
+ );
+
+ const writtenJson = mockWriteFileSync.mock.calls[0][1] as string;
+ const parsed = JSON.parse(writtenJson);
+ expect(Array.isArray(parsed)).toBe(true);
+ expect(parsed[0].localId).toBe('firebase_uid_123');
+ });
+
+ test('logs each user via exportLogger', async () => {
+ const exportFirebaseUsers = await getExportFn();
+
+ mockListUsers.mockResolvedValueOnce({
+ users: [
+ makeUserRecord({ uid: 'user_1' }),
+ makeUserRecord({ uid: 'user_2' }),
+ ],
+ pageToken: undefined,
+ });
+
+ await exportFirebaseUsers(serviceAccountPath, 'test-output.json');
+
+ expect(mockExportLogger).toHaveBeenCalledTimes(2);
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_1', status: 'success' },
+ expect.any(String)
+ );
+ expect(mockExportLogger).toHaveBeenCalledWith(
+ { userId: 'user_2', status: 'success' },
+ expect.any(String)
+ );
+ });
+
+ test('cleans up Firebase app in finally block', async () => {
+ const exportFirebaseUsers = await getExportFn();
+
+ mockListUsers.mockResolvedValueOnce({
+ users: [makeUserRecord()],
+ pageToken: undefined,
+ });
+
+ await exportFirebaseUsers(serviceAccountPath, 'test-output.json');
+
+ expect(mockDeleteApp).toHaveBeenCalled();
+ });
+
+ test('cleans up Firebase app even on error', async () => {
+ const exportFirebaseUsers = await getExportFn();
+
+ mockListUsers.mockRejectedValue(new Error('Auth error'));
+
+ await expect(
+ exportFirebaseUsers(serviceAccountPath, 'test-output.json')
+ ).rejects.toThrow('Auth error');
+
+ expect(mockDeleteApp).toHaveBeenCalled();
+ });
+});
diff --git a/tests/export-utils.test.ts b/tests/export-utils.test.ts
new file mode 100644
index 0000000..98d3dc6
--- /dev/null
+++ b/tests/export-utils.test.ts
@@ -0,0 +1,283 @@
+import { describe, expect, test, vi } from 'vitest';
+
+// Mock fs to avoid writing files during tests
+const mockWriteFileSync = vi.fn();
+const mockMkdirSync = vi.fn();
+
+vi.mock('fs', async () => {
+ const actual = await vi.importActual('fs');
+ return {
+ ...actual,
+ default: {
+ ...(actual as Record<string, unknown>),
+ writeFileSync: mockWriteFileSync,
+ mkdirSync: mockMkdirSync,
+ },
+ };
+});
+
+// Mock @clack/prompts
+const mockNote = vi.fn();
+const mockLogSuccess = vi.fn();
+
+vi.mock('@clack/prompts', () => ({
+ note: mockNote,
+ log: {
+ success: mockLogSuccess,
+ },
+}));
+
+// ============================================================================
+// getCoverageIcon tests
+// ============================================================================
+
+describe('getCoverageIcon', () => {
+ async function getIconFn() {
+ const mod = await import('../src/lib/export');
+ return mod.getCoverageIcon;
+ }
+
+ test('returns green icon when count equals total', async () => {
+ const getCoverageIcon = await getIconFn();
+ const result = getCoverageIcon(10, 10);
+ // Should contain the filled circle character
+ expect(result).toContain('●');
+ });
+
+ test('returns yellow icon when count is partial', async () => {
+ const getCoverageIcon = await getIconFn();
+ const result = getCoverageIcon(5, 10);
+ // Should contain the open circle character
+ expect(result).toContain('○');
+ });
+
+ test('returns dim icon when count is zero', async () => {
+ const getCoverageIcon = await getIconFn();
+ const result = getCoverageIcon(0, 10);
+ // Should contain the open circle character
+ expect(result).toContain('○');
+ });
+
+ test('returns green icon for zero total with zero count', async () => {
+ const getCoverageIcon = await getIconFn();
+ const result = getCoverageIcon(0, 0);
+ // 0 === 0 is true, so green
+ expect(result).toContain('●');
+ });
+});
+
+// ============================================================================
+// writeExportOutput tests
+// ============================================================================
+
+describe('writeExportOutput', () => {
+ async function getWriteFn() {
+ const mod = await import('../src/lib/export');
+ return mod.writeExportOutput;
+ }
+
+ test('creates exports directory and writes file', async () => {
+ const writeExportOutput = await getWriteFn();
+ const data = [{ id: 1 }, { id: 2 }];
+
+ const result = writeExportOutput(data, 'test.json');
+
+ expect(mockMkdirSync).toHaveBeenCalledWith(
+ expect.stringContaining('exports'),
+ { recursive: true }
+ );
+ expect(mockWriteFileSync).toHaveBeenCalledWith(
+ expect.stringContaining('test.json'),
+ JSON.stringify(data, null, 2)
+ );
+ expect(result).toContain('test.json');
+ });
+
+ test('resolves relative paths inside exports directory', async () => {
+ const writeExportOutput = await getWriteFn();
+
+ const result = writeExportOutput([], 'output.json');
+
+ expect(result).toContain('exports');
+ expect(result).toContain('output.json');
+ });
+
+ test('uses absolute paths directly', async () => {
+ const writeExportOutput = await getWriteFn();
+
+ const result = writeExportOutput([], '/tmp/absolute-output.json');
+
+ expect(result).toBe('/tmp/absolute-output.json');
+ });
+});
+
+// ============================================================================
+// displayFieldCoverage tests
+// ============================================================================
+
+describe('displayFieldCoverage', () => {
+ async function getDisplayFn() {
+ const mod = await import('../src/lib/export');
+ return mod.displayFieldCoverage;
+ }
+
+ test('calls p.note with field summary and p.log.success with count', async () => {
+ const displayFieldCoverage = await getDisplayFn();
+
+ mockNote.mockClear();
+ mockLogSuccess.mockClear();
+
+ displayFieldCoverage(
+ [
+ { label: 'have email', count: 10 },
+ { label: 'have phone', count: 5 },
+ ],
+ 10,
+ '/path/to/output.json'
+ );
+
+ expect(mockNote).toHaveBeenCalledTimes(1);
+ expect(mockNote).toHaveBeenCalledWith(
+ expect.stringContaining('10'),
+ 'Field Coverage'
+ );
+ expect(mockLogSuccess).toHaveBeenCalledTimes(1);
+ expect(mockLogSuccess).toHaveBeenCalledWith(expect.stringContaining('10'));
+ });
+
+ test('includes all field labels in summary', async () => {
+ const displayFieldCoverage = await getDisplayFn();
+
+ mockNote.mockClear();
+
+ displayFieldCoverage(
+ [
+ { label: 'have email', count: 3 },
+ { label: 'have username', count: 1 },
+ { label: 'have password', count: 0 },
+ ],
+ 3,
+ '/path/to/out.json'
+ );
+
+ const summaryArg = mockNote.mock.calls[0][0] as string;
+ expect(summaryArg).toContain('have email');
+ expect(summaryArg).toContain('have username');
+ expect(summaryArg).toContain('have password');
+ });
+});
+
+// ============================================================================
+// getDbConnectionErrorHint tests
+// ============================================================================
+
+describe('getDbConnectionErrorHint', () => {
+ async function getHintFn() {
+ const mod = await import('../src/lib/export');
+ return mod.getDbConnectionErrorHint;
+ }
+
+ test('returns hostname hint for ENOTFOUND (no platform)', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint('getaddrinfo ENOTFOUND host.db.co');
+ expect(hint).toContain('hostname could not be resolved');
+ expect(hint).toContain('Check the host');
+ });
+
+ test('returns supabase-specific hostname hint for ENOTFOUND', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'getaddrinfo ENOTFOUND db.abc.supabase.co',
+ 'supabase'
+ );
+ expect(hint).toContain('hostname could not be resolved');
+ expect(hint).toContain('project ref');
+ });
+
+ test('returns betterauth-specific hostname hint for ENOTFOUND', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'getaddrinfo ENOTFOUND localhost',
+ 'betterauth'
+ );
+ expect(hint).toContain('hostname could not be resolved');
+ expect(hint).toContain('Check the host');
+ });
+
+ test('returns IPv4 hint for ETIMEDOUT with supabase', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'connect ETIMEDOUT 1.2.3.4:5432',
+ 'supabase'
+ );
+ expect(hint).toContain('IPv4 add-on');
+ });
+
+ test('returns unreachable hint for ETIMEDOUT with betterauth', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'connect ETIMEDOUT 1.2.3.4:5432',
+ 'betterauth'
+ );
+ expect(hint).toContain('database server is unreachable');
+ });
+
+ test('returns IPv4 hint for ENETUNREACH with supabase', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'connect ENETUNREACH 1.2.3.4:5432',
+ 'supabase'
+ );
+ expect(hint).toContain('IPv4 add-on');
+ });
+
+ test('returns unreachable hint for ENETUNREACH with betterauth', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'connect ENETUNREACH 1.2.3.4:5432',
+ 'betterauth'
+ );
+ expect(hint).toContain('database server is unreachable');
+ });
+
+ test('returns password hint for authentication errors', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'password authentication failed for user "postgres"'
+ );
+ expect(hint).toContain('Check the password');
+ });
+
+ test('returns password hint for auth errors with platform', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'authentication failed for user "user"',
+ 'betterauth'
+ );
+ expect(hint).toContain('Check the password');
+ });
+
+ test('returns generic hint for unknown errors (no platform)', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint('some unexpected error');
+ expect(hint).toContain('Verify your connection string');
+ expect(hint).toContain('database is accessible');
+ });
+
+ test('returns supabase-specific generic hint', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint('some unexpected error', 'supabase');
+ expect(hint).toContain('Verify your connection string');
+ expect(hint).toContain('Supabase project');
+ });
+
+ test('returns betterauth-specific generic hint', async () => {
+ const getDbConnectionErrorHint = await getHintFn();
+ const hint = getDbConnectionErrorHint(
+ 'some unexpected error',
+ 'betterauth'
+ );
+ expect(hint).toContain('Verify your connection string');
+ expect(hint).toContain('database is accessible');
+ });
+});
diff --git a/tests/export.test.ts b/tests/export.test.ts
index b0bde20..ecc0d13 100644
--- a/tests/export.test.ts
+++ b/tests/export.test.ts
@@ -1,5 +1,5 @@
import { beforeEach, describe, expect, test, vi } from 'vitest';
-import { isValidConnectionString, resolveConnectionString } from '../src/utils';
+import { isValidConnectionString, resolveConnectionString } from '../src/lib';
// Mock pg Client
const mockConnect = vi.fn();
@@ -16,7 +16,7 @@ vi.mock('pg', () => {
};
});
-// Mock fs.writeFileSync to avoid writing files during tests
+// Mock fs to avoid writing files during tests
vi.mock('fs', async () => {
const actual = await vi.importActual('fs');
return {
@@ -24,6 +24,7 @@ vi.mock('fs', async () => {
default: {
...(actual as Record),
writeFileSync: vi.fn(),
+ mkdirSync: vi.fn(),
},
};
});
@@ -49,8 +50,12 @@ describe('isValidConnectionString', () => {
).toBe(true);
});
- test('rejects non-postgres URLs', () => {
- expect(isValidConnectionString('mysql://host:3306/db')).toBe(false);
+ test('accepts mysql:// URLs', () => {
+ expect(isValidConnectionString('mysql://host:3306/db')).toBe(true);
+ });
+
+ test('rejects non-database URLs', () => {
+ expect(isValidConnectionString('ftp://host/file')).toBe(false);
});
test('rejects plain strings', () => {
@@ -114,13 +119,13 @@ describe('resolveConnectionString', () => {
expect(result.warning).toBeUndefined();
});
- test('returns warning and undefined dbUrl when env var is not a valid Postgres URL', () => {
+ test('returns warning and undefined dbUrl when env var is not a valid connection string', () => {
const result = resolveConnectionString([], {
SUPABASE_DB_URL: 'not-a-valid-url',
});
expect(result.dbUrl).toBeUndefined();
- expect(result.warning).toContain('not a valid Postgres URL');
+ expect(result.warning).toContain('not a valid database connection string');
});
test('returns warning for invalid SUPABASE_DB_URL with https scheme', () => {
@@ -129,7 +134,7 @@ describe('resolveConnectionString', () => {
});
expect(result.dbUrl).toBeUndefined();
- expect(result.warning).toContain('not a valid Postgres URL');
+ expect(result.warning).toContain('not a valid database connection string');
});
test('returns undefined dbUrl and no warning when no env vars set', () => {
diff --git a/tests/lib-db.test.ts b/tests/lib-db.test.ts
new file mode 100644
index 0000000..503589e
--- /dev/null
+++ b/tests/lib-db.test.ts
@@ -0,0 +1,166 @@
+import { describe, expect, test } from 'vitest';
+
+// ============================================================================
+// detectDbType tests
+// ============================================================================
+
+describe('detectDbType', () => {
+ async function getDetectFn() {
+ const mod = await import('../src/lib/db');
+ return mod.detectDbType;
+ }
+
+ test('detects PostgreSQL from postgresql:// prefix', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('postgresql://user:pass@localhost:5432/db')).toBe(
+ 'postgres'
+ );
+ });
+
+ test('detects PostgreSQL from postgres:// prefix', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('postgres://user:pass@localhost:5432/db')).toBe(
+ 'postgres'
+ );
+ });
+
+ test('detects MySQL from mysql:// prefix', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('mysql://user:pass@localhost:3306/db')).toBe('mysql');
+ });
+
+ test('detects MySQL from mysql2:// prefix', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('mysql2://user:pass@localhost:3306/db')).toBe('mysql');
+ });
+
+ test('detects SQLite from file path with .sqlite extension', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('/path/to/database.sqlite')).toBe('sqlite');
+ });
+
+ test('detects SQLite from file path with .db extension', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('/path/to/database.db')).toBe('sqlite');
+ });
+
+ test('detects SQLite from file: prefix', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('file:./data.sqlite')).toBe('sqlite');
+ });
+
+ test('detects SQLite as default for unknown formats', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('./my-database')).toBe('sqlite');
+ });
+
+ test('is case-insensitive for protocol detection', async () => {
+ const detectDbType = await getDetectFn();
+ expect(detectDbType('POSTGRESQL://user:pass@localhost/db')).toBe(
+ 'postgres'
+ );
+ expect(detectDbType('MYSQL://user:pass@localhost/db')).toBe('mysql');
+ });
+});
+
+// ============================================================================
+// isValidConnectionString tests (expanded for MySQL/SQLite)
+// ============================================================================
+
+describe('isValidConnectionString expanded', () => {
+ async function getValidateFn() {
+ const mod = await import('../src/lib/index');
+ return mod.isValidConnectionString;
+ }
+
+ test('accepts PostgreSQL URLs', async () => {
+ const isValidConnectionString = await getValidateFn();
+ expect(
+ isValidConnectionString('postgresql://user:pass@localhost:5432/db')
+ ).toBe(true);
+ expect(
+ isValidConnectionString('postgres://user:pass@localhost:5432/db')
+ ).toBe(true);
+ });
+
+ test('accepts MySQL URLs', async () => {
+ const isValidConnectionString = await getValidateFn();
+ expect(isValidConnectionString('mysql://user:pass@localhost:3306/db')).toBe(
+ true
+ );
+ expect(
+ isValidConnectionString('mysql2://user:pass@localhost:3306/db')
+ ).toBe(true);
+ });
+
+ test('accepts SQLite file paths', async () => {
+ const isValidConnectionString = await getValidateFn();
+ expect(isValidConnectionString('/path/to/database.sqlite')).toBe(true);
+ expect(isValidConnectionString('/path/to/database.sqlite3')).toBe(true);
+ expect(isValidConnectionString('/path/to/database.db')).toBe(true);
+ expect(isValidConnectionString('file:./data.sqlite')).toBe(true);
+ });
+
+ test('rejects invalid strings', async () => {
+ const isValidConnectionString = await getValidateFn();
+ expect(isValidConnectionString('not-a-url')).toBe(false);
+ expect(isValidConnectionString('http://example.com')).toBe(false);
+ expect(isValidConnectionString('')).toBe(false);
+ });
+
+ test('handles malformed PostgreSQL URLs', async () => {
+ const isValidConnectionString = await getValidateFn();
+ expect(isValidConnectionString('postgresql://')).toBe(true); // Parseable URL, just empty
+ expect(isValidConnectionString('postgresql://[invalid')).toBe(false);
+ });
+});
+
+// ============================================================================
+// resolveConnectionString updated warning message
+// ============================================================================
+
+describe('resolveConnectionString updated warning', () => {
+ async function getResolveFn() {
+ const mod = await import('../src/lib/index');
+ return mod.resolveConnectionString;
+ }
+
+ test('warns with "not a valid database connection string" for invalid env var', async () => {
+ const resolveConnectionString = await getResolveFn();
+ const result = resolveConnectionString(
+ [],
+ { AUTHJS_DB_URL: 'not-a-url' },
+ { envVarName: 'AUTHJS_DB_URL', defaultOutputFile: 'authjs-export.json' }
+ );
+
+ expect(result.dbUrl).toBeUndefined();
+ expect(result.warning).toContain('AUTHJS_DB_URL');
+ expect(result.warning).toContain('not a valid database connection string');
+ });
+
+ test('accepts MySQL URL from env var', async () => {
+ const resolveConnectionString = await getResolveFn();
+ const mysqlUrl = 'mysql://user:pass@localhost:3306/db';
+ const result = resolveConnectionString(
+ [],
+ { AUTHJS_DB_URL: mysqlUrl },
+ { envVarName: 'AUTHJS_DB_URL', defaultOutputFile: 'authjs-export.json' }
+ );
+
+ expect(result.dbUrl).toBe(mysqlUrl);
+ expect(result.warning).toBeUndefined();
+ });
+
+ test('accepts SQLite path from env var', async () => {
+ const resolveConnectionString = await getResolveFn();
+ const sqlitePath = '/path/to/database.sqlite';
+ const result = resolveConnectionString(
+ [],
+ { AUTHJS_DB_URL: sqlitePath },
+ { envVarName: 'AUTHJS_DB_URL', defaultOutputFile: 'authjs-export.json' }
+ );
+
+ expect(result.dbUrl).toBe(sqlitePath);
+ expect(result.warning).toBeUndefined();
+ });
+});
diff --git a/tests/migrate/cli.test.ts b/tests/migrate/cli.test.ts
index dd97e5e..25312a9 100644
--- a/tests/migrate/cli.test.ts
+++ b/tests/migrate/cli.test.ts
@@ -1,18 +1,14 @@
import { beforeEach, describe, expect, test, vi } from 'vitest';
import fs from 'fs';
import path from 'path';
+import { displayCrossReference, loadRawUsers } from '../../src/migrate/cli';
+import { loadSettings, saveSettings } from '../../src/lib/settings';
+import { analyzeFields, hasValue, validateUsers } from '../../src/lib/analysis';
import {
- analyzeFields,
analyzeUserProviders,
- detectInstanceType,
- displayCrossReference,
findUsersWithDisabledProviders,
- hasValue,
- loadRawUsers,
- loadSettings,
- saveSettings,
- validateUsers,
-} from '../../src/migrate/cli';
+} from '../../src/lib/supabase';
+import { detectInstanceType } from '../../src/lib/clerk';
// Mock modules
vi.mock('fs', async () => {
@@ -75,7 +71,7 @@ vi.mock('../../src/envs-constants', () => ({
}));
// Mock the utils module
-vi.mock('../../src/utils', async (importOriginal) => {
+vi.mock('../../src/lib', async (importOriginal) => {
// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
const actual = (await importOriginal()) as Record;
return {
diff --git a/tests/migrate/functions.test.ts b/tests/migrate/functions.test.ts
index 388adec..660b111 100644
--- a/tests/migrate/functions.test.ts
+++ b/tests/migrate/functions.test.ts
@@ -1,7 +1,7 @@
import { afterEach, beforeEach, describe, expect, test } from 'vitest';
import { existsSync, readdirSync, unlinkSync } from 'node:fs';
import { loadUsersFromFile } from '../../src/migrate/functions';
-import { transformKeys } from '../../src/utils';
+import { transformKeys } from '../../src/lib';
import { transformers } from '../../src/transformers';
// Snapshot of files in logs/ before each test so we only clean up test-created files
diff --git a/tests/migrate/import-users.test.ts b/tests/migrate/import-users.test.ts
index 28f3e9c..f3bcbe4 100644
--- a/tests/migrate/import-users.test.ts
+++ b/tests/migrate/import-users.test.ts
@@ -51,27 +51,32 @@ vi.mock('picocolors', () => ({
}));
// Mock utils for testing
-vi.mock('../../src/utils', () => ({
- getDateTimeStamp: vi.fn(() => '2024-01-01T12:00:00'),
- tryCatch: async (promise: Promise) => {
- try {
- const data = await promise;
- return [data, null];
- } catch (throwable) {
- if (throwable instanceof Error) return [null, throwable];
- throw throwable;
- }
- },
- getRetryDelay: (
- retryAfterSeconds: number | undefined,
- _defaultDelayMs: number
- ) => {
- // Use a short delay for tests to avoid timeouts
- const delayMs = retryAfterSeconds ? retryAfterSeconds * 1000 : 10; // 10ms instead of _defaultDelayMs
- const delaySeconds = retryAfterSeconds || delayMs / 1000;
- return { delayMs, delaySeconds };
- },
-}));
+vi.mock('../../src/lib', async (importOriginal) => {
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
+ const actual = (await importOriginal()) as Record;
+ return {
+ ...actual,
+ getDateTimeStamp: vi.fn(() => '2024-01-01T12:00:00'),
+ tryCatch: async (promise: Promise) => {
+ try {
+ const data = await promise;
+ return [data, null];
+ } catch (throwable) {
+ if (throwable instanceof Error) return [null, throwable];
+ throw throwable;
+ }
+ },
+ getRetryDelay: (
+ retryAfterSeconds: number | undefined,
+ _defaultDelayMs: number
+ ) => {
+ // Use a short delay for tests to avoid timeouts
+ const delayMs = retryAfterSeconds ? retryAfterSeconds * 1000 : 10; // 10ms instead of _defaultDelayMs
+ const delaySeconds = retryAfterSeconds || delayMs / 1000;
+ return { delayMs, delaySeconds };
+ },
+ };
+});
// Mock logger module
vi.mock('../../src/logger', () => ({
@@ -92,10 +97,8 @@ vi.mock('../../src/envs-constants', () => ({
}));
// Import after mocks are set up
-import {
- importUsers,
- normalizeErrorMessage,
-} from '../../src/migrate/import-users';
+import { importUsers } from '../../src/migrate/import-users';
+import { normalizeErrorMessage } from '../../src/lib';
import * as logger from '../../src/logger';
describe('importUsers', () => {
diff --git a/tests/transformers.test.ts b/tests/transformers.test.ts
index e519f63..413561c 100644
--- a/tests/transformers.test.ts
+++ b/tests/transformers.test.ts
@@ -10,21 +10,24 @@ import * as path from 'path';
*/
const TRANSFORMERS_DIR = path.join(process.cwd(), 'src/transformers');
-const INDEX_FILE = path.join(TRANSFORMERS_DIR, 'index.ts');
+const REGISTRY_FILE = path.join(TRANSFORMERS_DIR, 'registry.ts');
/**
- * Gets all transformer files (excluding index.ts)
+ * Gets all transformer files (excluding index.ts and registry.ts)
*/
function getTransformerFiles(): string[] {
const files = fs.readdirSync(TRANSFORMERS_DIR);
return files
- .filter((file) => file.endsWith('.ts') && file !== 'index.ts')
+ .filter(
+ (file) =>
+ file.endsWith('.ts') && file !== 'index.ts' && file !== 'registry.ts'
+ )
.map((file) => file.replace('.ts', ''));
}
describe('transformer registration', () => {
const transformerFiles = getTransformerFiles();
- const indexContent = fs.readFileSync(INDEX_FILE, 'utf-8');
+ const registryContent = fs.readFileSync(REGISTRY_FILE, 'utf-8');
test('should have at least one transformer file', () => {
expect(transformerFiles.length).toBeGreaterThan(0);
@@ -57,8 +60,8 @@ describe('transformer registration', () => {
);
expect(
- importPattern.test(indexContent),
- `${fileName} must be imported in index.ts. Add: import ${fileName}Transformer from './${fileName}';`
+ importPattern.test(registryContent),
+ `${fileName} must be imported in registry.ts. Add: import ${fileName}Transformer from './${fileName}';`
).toBe(true);
});
});
@@ -97,16 +100,16 @@ describe('transformer registration', () => {
- // Check the variable is in the transformers array in index.ts
+ // Check the variable is in the transformers array in registry.ts
const arrayPattern = new RegExp(`\\b${variableName}\\b`);
expect(
- arrayPattern.test(indexContent),
- `Transformer "${variableName}" with key "${transformerKey}" from ${fileName}.ts must be added to the transformers array in index.ts`
+ arrayPattern.test(registryContent),
+ `Transformer "${variableName}" with key "${transformerKey}" from ${fileName}.ts must be added to the transformers array in registry.ts`
).toBe(true);
}
);
});
test('transformers array has correct number of entries', () => {
- // Count the number of imports in index.ts (excluding type imports)
- const importMatches = indexContent.match(
+ // Count the number of imports in registry.ts (excluding type imports)
+ const importMatches = registryContent.match(
/import\s+\w+\s+from\s+['"]\.\/\w+['"]/g
);
const importCount = importMatches ? importMatches.length : 0;
@@ -114,7 +117,7 @@ describe('transformer registration', () => {
expect(
importCount,
`Expected ${transformerFiles.length} transformer imports but found ${importCount}. ` +
- `Make sure all transformer files are imported in index.ts`
+ `Make sure all transformer files are imported in registry.ts`
).toBe(transformerFiles.length);
});
diff --git a/tests/utils.test.ts b/tests/utils.test.ts
index e0527d7..07bd753 100644
--- a/tests/utils.test.ts
+++ b/tests/utils.test.ts
@@ -6,7 +6,7 @@ import {
getFileType,
getRetryDelay,
tryCatch,
-} from '../src/utils';
+} from '../src/lib';
import path from 'path';
describe('getDateTimeStamp', () => {