diff --git a/.changeset/better-auth-fmodata-adapter.md b/.changeset/better-auth-fmodata-adapter.md
new file mode 100644
index 00000000..b473380c
--- /dev/null
+++ b/.changeset/better-auth-fmodata-adapter.md
@@ -0,0 +1,9 @@
+---
+"@proofkit/better-auth": minor
+"@proofkit/fmodata": patch
+---
+
+BREAKING(@proofkit/better-auth): Use fmodata Database object instead of raw OData config.
+Config now requires `database` (fmodata Database instance) instead of
+`odata: { serverUrl, auth, database }`.
+Enables fetch override via FMServerConnection's fetchClientOptions.
diff --git a/apps/docs/content/docs/better-auth/installation.mdx b/apps/docs/content/docs/better-auth/installation.mdx
index 691cc3e0..3f1bddba 100644
--- a/apps/docs/content/docs/better-auth/installation.mdx
+++ b/apps/docs/content/docs/better-auth/installation.mdx
@@ -24,36 +24,38 @@ This automated setup expects you to have Next.js and shadcn/ui set up in your pr
Run the following command to add the necessary packages and config files to your project. You will not need to follow the better-auth installation guide.
-
+
# Step 1b: Manual Setup
Follow the [Better-Auth installation guide](https://better-auth.com/docs/installation) to get started in your app, but come back here for special instructions for anything related to your Database Setup or schema migrations.
### Database Setup
-Ensure you have the @proofkit/better-auth package installed in your app.
-
+Ensure you have the @proofkit/better-auth and @proofkit/fmodata packages installed in your app.
+
Configure your database connection in your `auth.ts` file. Be sure to set these secret values in your environment variables. The credentials you use here need `fmodata` permissions enabled, and read/write access to the better-auth tables.
```ts title="auth.ts"
import { betterAuth } from "better-auth";
+import { FMServerConnection } from "@proofkit/fmodata";
import { FileMakerAdapter } from "@proofkit/better-auth";
+const connection = new FMServerConnection({
+ serverUrl: process.env.FM_SERVER_URL!,
+ auth: {
+ // option 1: username/password credentials
+ username: process.env.FM_USERNAME!,
+ password: process.env.FM_PASSWORD!,
+
+ // option 2: Data API key (OttoFMS 4.11+, OData enabled for the key)
+ // apiKey: process.env.OTTO_API_KEY!,
+ },
+});
+
+const db = connection.database(process.env.FM_DATABASE!);
+
export const auth = betterAuth({
- database: FileMakerAdapter({
- odata: {
- serverUrl: process.env.FM_SERVER_URL,
- auth: {
- // option 1: username/password credentials
- username: process.env.FM_USERNAME,
- password: process.env.FM_PASSWORD,
-
- // option 2: Data API key (OttoFMS 4.11+, OData enabled for the key)
- // apiKey: process.env.OTTO_API_KEY,
- },
- database: process.env.FM_DATABASE,
- },
- }),
+ database: FileMakerAdapter({ database: db }),
// ...rest of your config
});
```
diff --git a/packages/better-auth/package.json b/packages/better-auth/package.json
index fe213219..6b0e5986 100644
--- a/packages/better-auth/package.json
+++ b/packages/better-auth/package.json
@@ -51,6 +51,7 @@
"@babel/preset-react": "^7.28.5",
"@babel/preset-typescript": "^7.28.5",
"@commander-js/extra-typings": "^14.0.0",
+ "@proofkit/fmodata": "workspace:*",
"@tanstack/vite-config": "^0.2.1",
"better-auth": "^1.4.11",
"c12": "^3.3.3",
@@ -58,17 +59,13 @@
"commander": "^14.0.2",
"dotenv": "^16.6.1",
"fs-extra": "^11.3.3",
- "neverthrow": "^8.2.0",
- "odata-query": "^8.0.7",
"prompts": "^2.4.2",
- "vite": "^6.4.1",
- "zod": "^4.3.5"
+ "vite": "^6.4.1"
},
"devDependencies": {
"@types/fs-extra": "^11.0.4",
"@types/prompts": "^2.4.9",
"@vitest/ui": "^3.2.4",
- "fm-odata-client": "^3.0.2",
"publint": "^0.3.16",
"typescript": "^5.9.3",
"vitest": "^4.0.17"
diff --git a/packages/better-auth/src/adapter.ts b/packages/better-auth/src/adapter.ts
index 701f3cb3..039397a7 100644
--- a/packages/better-auth/src/adapter.ts
+++ b/packages/better-auth/src/adapter.ts
@@ -1,19 +1,7 @@
/** biome-ignore-all lint/suspicious/noExplicitAny: library code */
+import type { Database } from "@proofkit/fmodata";
import { logger } from "better-auth";
import { type CleanedWhere, createAdapter, type DBAdapterDebugLogOption } from "better-auth/adapters";
-import buildQuery from "odata-query";
-import { prettifyError, z } from "zod/v4";
-import { createRawFetch, type FmOdataConfig } from "./odata";
-
-const configSchema = z.object({
- debugLogs: z.unknown().optional(),
- usePlural: z.boolean().optional(),
- odata: z.object({
- serverUrl: z.url(),
- auth: z.union([z.object({ username: z.string(), password: z.string() }), z.object({ apiKey: z.string() })]),
- database: z.string().endsWith(".fmp12"),
- }),
-});
export interface FileMakerAdapterConfig {
/**
@@ -24,27 +12,12 @@ export interface FileMakerAdapterConfig {
* If the table names in the schema are plural.
*/
usePlural?: boolean;
-
/**
- * Connection details for the FileMaker server.
+ * The fmodata Database instance to use for all OData requests.
*/
- odata: FmOdataConfig;
-}
-
-export interface AdapterOptions {
- config: FileMakerAdapterConfig;
+ database: Database;
}
-const defaultConfig: Required<FileMakerAdapterConfig> = {
- debugLogs: false,
- usePlural: false,
- odata: {
- serverUrl: "",
- auth: { username: "", password: "" },
- database: "",
- },
-};
-
// Regex patterns for field validation and ISO date detection
const FIELD_SPECIAL_CHARS_REGEX = /[\s_]/;
const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{3})?Z?$/;
@@ -155,41 +128,59 @@ export function parseWhere(where?: CleanedWhere[]): string {
return clauses.join(" ");
}
-export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig) => {
- const parsed = configSchema.loose().safeParse(_config);
+/**
+ * Build an OData query string from parameters.
+ */
+function buildQueryString(params: {
+ top?: number;
+ skip?: number;
+ filter?: string;
+ orderBy?: string;
+ select?: string[];
+}): string {
+ const parts: string[] = [];
+ if (params.top !== undefined) {
+ parts.push(`$top=${params.top}`);
+ }
+ if (params.skip !== undefined) {
+ parts.push(`$skip=${params.skip}`);
+ }
+ if (params.filter) {
+ parts.push(`$filter=${encodeURIComponent(params.filter)}`);
+ }
+ if (params.orderBy) {
+ parts.push(`$orderby=${encodeURIComponent(params.orderBy)}`);
+ }
+ if (params.select?.length) {
+ parts.push(`$select=${params.select.map(encodeURIComponent).join(",")}`);
+ }
+ return parts.length > 0 ? `?${parts.join("&")}` : "";
+}
- if (!parsed.success) {
- throw new Error(`Invalid configuration: ${prettifyError(parsed.error)}`);
+export const FileMakerAdapter = (config: FileMakerAdapterConfig) => {
+ if (!config.database || typeof config.database !== "object") {
+ throw new Error("FileMakerAdapter requires a `database` (fmodata Database instance).");
}
- const config = parsed.data;
- const { fetch } = createRawFetch({
- ...config.odata,
- logging: config.debugLogs ? "verbose" : "none",
- });
+ const db = config.database;
const adapterFactory = createAdapter({
config: {
adapterId: "filemaker",
adapterName: "FileMaker",
- usePlural: config.usePlural ?? false, // Whether the table names in the schema are plural.
- debugLogs: config.debugLogs ?? false, // Whether to enable debug logs.
- supportsJSON: false, // Whether the database supports JSON. (Default: false)
- supportsDates: false, // Whether the database supports dates. (Default: true)
- supportsBooleans: false, // Whether the database supports booleans. (Default: true)
- supportsNumericIds: false, // Whether the database supports auto-incrementing numeric IDs. (Default: true)
+ usePlural: config.usePlural ?? false,
+ debugLogs: config.debugLogs ?? false,
+ supportsJSON: false,
+ supportsDates: false,
+ supportsBooleans: false,
+ supportsNumericIds: false,
},
adapter: () => {
return {
create: async ({ data, model }) => {
- if (model === "session") {
- console.log("session", data);
- }
-
- const result = await fetch(`/${model}`, {
+          const result = await db._makeRequest<Record<string, unknown>>(`/${model}`, {
method: "POST",
- body: data,
- output: z.looseObject({ id: z.string() }),
+ body: JSON.stringify(data),
});
if (result.error) {
@@ -202,15 +193,12 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
const filter = parseWhere(where);
logger.debug("$filter", filter);
- const query = buildQuery({
+ const query = buildQueryString({
filter: filter.length > 0 ? filter : undefined,
});
- const result = await fetch(`/${model}/$count${query}`, {
- method: "GET",
- output: z.object({ value: z.number() }),
- });
- if (!result.data) {
+ const result = await db._makeRequest<{ value: number }>(`/${model}/$count${query}`);
+ if (result.error) {
throw new Error("Failed to count records");
}
return (result.data?.value as any) ?? 0;
@@ -219,15 +207,12 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
const filter = parseWhere(where);
logger.debug("$filter", filter);
- const query = buildQuery({
+ const query = buildQueryString({
top: 1,
filter: filter.length > 0 ? filter : undefined,
});
- const result = await fetch(`/${model}${query}`, {
- method: "GET",
- output: z.object({ value: z.array(z.any()) }),
- });
+ const result = await db._makeRequest<{ value: any[] }>(`/${model}${query}`);
if (result.error) {
throw new Error("Failed to find record");
}
@@ -237,7 +222,7 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
const filter = parseWhere(where);
logger.debug("FIND MANY", { where, filter });
- const query = buildQuery({
+ const query = buildQueryString({
top: limit,
skip: offset,
orderBy: sortBy ? `${sortBy.field} ${sortBy.direction ?? "asc"}` : undefined,
@@ -245,10 +230,7 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
});
logger.debug("QUERY", query);
- const result = await fetch(`/${model}${query}`, {
- method: "GET",
- output: z.object({ value: z.array(z.any()) }),
- });
+ const result = await db._makeRequest<{ value: any[] }>(`/${model}${query}`);
logger.debug("RESULT", result);
if (result.error) {
@@ -259,54 +241,44 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
},
delete: async ({ model, where }) => {
const filter = parseWhere(where);
- console.log("DELETE", { model, where, filter });
logger.debug("$filter", filter);
// Find a single id matching the filter
- const query = buildQuery({
+ const query = buildQueryString({
top: 1,
select: [`"id"`],
filter: filter.length > 0 ? filter : undefined,
});
- const toDelete = await fetch(`/${model}${query}`, {
- method: "GET",
- output: z.object({ value: z.array(z.object({ id: z.string() })) }),
- });
+ const toDelete = await db._makeRequest<{ value: { id: string }[] }>(`/${model}${query}`);
const id = toDelete.data?.value?.[0]?.id;
if (!id) {
- // Nothing to delete
return;
}
- const result = await fetch(`/${model}('${id}')`, {
+ const result = await db._makeRequest(`/${model}('${id}')`, {
method: "DELETE",
});
if (result.error) {
- console.log("DELETE ERROR", result.error);
throw new Error("Failed to delete record");
}
},
deleteMany: async ({ model, where }) => {
const filter = parseWhere(where);
- console.log("DELETE MANY", { model, where, filter });
// Find all ids matching the filter
- const query = buildQuery({
+ const query = buildQueryString({
select: [`"id"`],
filter: filter.length > 0 ? filter : undefined,
});
- const rows = await fetch(`/${model}${query}`, {
- method: "GET",
- output: z.object({ value: z.array(z.object({ id: z.string() })) }),
- });
+ const rows = await db._makeRequest<{ value: { id: string }[] }>(`/${model}${query}`);
const ids = rows.data?.value?.map((r: any) => r.id) ?? [];
let deleted = 0;
for (const id of ids) {
- const res = await fetch(`/${model}('${id}')`, {
+ const res = await db._makeRequest(`/${model}('${id}')`, {
method: "DELETE",
});
if (!res.error) {
@@ -319,16 +291,14 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
const filter = parseWhere(where);
logger.debug("UPDATE", { model, where, update });
logger.debug("$filter", filter);
+
// Find one id to update
- const query = buildQuery({
+ const query = buildQueryString({
select: [`"id"`],
filter: filter.length > 0 ? filter : undefined,
});
- const existing = await fetch(`/${model}${query}`, {
- method: "GET",
- output: z.object({ value: z.array(z.object({ id: z.string() })) }),
- });
+ const existing = await db._makeRequest<{ value: { id: string }[] }>(`/${model}${query}`);
logger.debug("EXISTING", existing.data);
const id = existing.data?.value?.[0]?.id;
@@ -336,9 +306,9 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
return null;
}
- const patchRes = await fetch(`/${model}('${id}')`, {
+ const patchRes = await db._makeRequest(`/${model}('${id}')`, {
method: "PATCH",
- body: update,
+ body: JSON.stringify(update),
});
logger.debug("PATCH RES", patchRes.data);
if (patchRes.error) {
@@ -346,32 +316,27 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
}
// Read back the updated record
- const readBack = await fetch(`/${model}('${id}')`, {
- method: "GET",
- output: z.record(z.string(), z.unknown()),
- });
+          const readBack = await db._makeRequest<Record<string, unknown>>(`/${model}('${id}')`);
logger.debug("READ BACK", readBack.data);
return (readBack.data as any) ?? null;
},
updateMany: async ({ model, where, update }) => {
const filter = parseWhere(where);
+
// Find all ids matching the filter
- const query = buildQuery({
+ const query = buildQueryString({
select: [`"id"`],
filter: filter.length > 0 ? filter : undefined,
});
- const rows = await fetch(`/${model}${query}`, {
- method: "GET",
- output: z.object({ value: z.array(z.object({ id: z.string() })) }),
- });
+ const rows = await db._makeRequest<{ value: { id: string }[] }>(`/${model}${query}`);
const ids = rows.data?.value?.map((r: any) => r.id) ?? [];
let updated = 0;
for (const id of ids) {
- const res = await fetch(`/${model}('${id}')`, {
+ const res = await db._makeRequest(`/${model}('${id}')`, {
method: "PATCH",
- body: update,
+ body: JSON.stringify(update),
});
if (!res.error) {
updated++;
@@ -383,7 +348,15 @@ export const FileMakerAdapter = (_config: FileMakerAdapterConfig = defaultConfig
},
});
- // Expose the FileMaker config for CLI access
- (adapterFactory as any).filemakerConfig = config as FileMakerAdapterConfig;
- return adapterFactory;
+ // Expose the Database instance for CLI access.
+ // Set on both the factory function (for pre-getAdapter extraction)
+ // and the returned adapter (for post-getAdapter extraction).
+ const originalFactory = adapterFactory;
+ const wrappedFactory = ((options: unknown) => {
+    const adapter = (originalFactory as (opts: unknown) => Record<string, unknown>)(options);
+ adapter.database = db;
+ return adapter;
+ }) as typeof adapterFactory;
+ (wrappedFactory as unknown as { database: Database }).database = db;
+ return wrappedFactory;
};
diff --git a/packages/better-auth/src/cli/index.ts b/packages/better-auth/src/cli/index.ts
index f72f9662..586737f5 100644
--- a/packages/better-auth/src/cli/index.ts
+++ b/packages/better-auth/src/cli/index.ts
@@ -1,14 +1,14 @@
#!/usr/bin/env node --no-warnings
import { Command } from "@commander-js/extra-typings";
+import type { Database } from "@proofkit/fmodata";
+import { FMServerConnection } from "@proofkit/fmodata";
import { logger } from "better-auth";
import { getAdapter, getSchema } from "better-auth/db";
import chalk from "chalk";
import fs from "fs-extra";
import prompts from "prompts";
-import type { FileMakerAdapterConfig } from "../adapter";
import { getConfig } from "../better-auth-cli/utils/get-config";
import { executeMigration, planMigration, prettyPrintMigrationPlan } from "../migrate";
-import { createRawFetch } from "../odata";
import "dotenv/config";
async function main() {
@@ -52,21 +52,55 @@ async function main() {
const betterAuthSchema = getSchema(config);
- const adapterConfig = (adapter as unknown as { filemakerConfig: FileMakerAdapterConfig }).filemakerConfig;
- const { fetch } = createRawFetch({
- ...adapterConfig.odata,
- auth:
- // If the username and password are provided in the CLI, use them to authenticate instead of what's in the config file.
- options.username && options.password
- ? {
- username: options.username,
- password: options.password,
- }
- : adapterConfig.odata.auth,
- logging: "verbose", // Enable logging for CLI operations
- });
+ // Extract Database from the adapter factory or resolved adapter.
+ // config.database is the FileMakerAdapter factory function (has .database set on it).
+ // adapter is the resolved adapter after getAdapter() calls the factory (also has .database).
+ // Try both: adapter first (post-call), then config.database (pre-call / factory function).
+ const configDb =
+ (adapter as unknown as { database?: Database }).database ??
+ (config.database as unknown as { database?: Database } | undefined)?.database;
+ if (!configDb || typeof configDb !== "object" || !("schema" in configDb)) {
+ logger.error(
+ "Could not extract Database instance from adapter. Ensure your auth.ts uses FileMakerAdapter with an fmodata Database.",
+ );
+ process.exit(1);
+ }
+ let db: Database = configDb;
+
+ // Extract database name and server URL for display
+ const dbName: string = (configDb as unknown as { _getDatabaseName: string })
+ ._getDatabaseName;
+ const baseUrl: string | undefined = (
+ configDb as unknown as { context?: { _getBaseUrl?: () => string } }
+ ).context?._getBaseUrl?.();
+ const serverUrl = baseUrl ? new URL(baseUrl).origin : undefined;
+
+ // If CLI credential overrides are provided, construct a new connection
+ if (options.username && options.password) {
+ if (!dbName) {
+ logger.error("Could not determine database filename from adapter config.");
+ process.exit(1);
+ }
- const migrationPlan = await planMigration(fetch, betterAuthSchema, adapterConfig.odata.database);
+ if (!baseUrl) {
+ logger.error(
+ "Could not determine server URL from adapter config. Ensure your auth.ts uses FMServerConnection.",
+ );
+ process.exit(1);
+ }
+
+ const connection = new FMServerConnection({
+ serverUrl: serverUrl as string,
+ auth: {
+ username: options.username,
+ password: options.password,
+ },
+ });
+
+ db = connection.database(dbName);
+ }
+
+ const migrationPlan = await planMigration(db, betterAuthSchema);
if (migrationPlan.length === 0) {
logger.info("No changes to apply. Database is up to date.");
@@ -74,7 +108,7 @@ async function main() {
}
if (!options.yes) {
- prettyPrintMigrationPlan(migrationPlan);
+ prettyPrintMigrationPlan(migrationPlan, { serverUrl, fileName: dbName });
if (migrationPlan.length > 0) {
console.log(chalk.gray("💡 Tip: You can use the --yes flag to skip this confirmation."));
@@ -91,12 +125,18 @@ async function main() {
}
}
- await executeMigration(fetch, migrationPlan);
-
- logger.info("Migration applied successfully.");
+ try {
+ await executeMigration(db, migrationPlan);
+ logger.info("Migration applied successfully.");
+ } catch {
+ process.exit(1);
+ }
});
await program.parseAsync(process.argv);
process.exit(0);
}
-main().catch(console.error);
+main().catch((err) => {
+ logger.error(err.message ?? err);
+ process.exit(1);
+});
diff --git a/packages/better-auth/src/migrate.ts b/packages/better-auth/src/migrate.ts
index 6f022b0b..02861fe0 100644
--- a/packages/better-auth/src/migrate.ts
+++ b/packages/better-auth/src/migrate.ts
@@ -1,8 +1,7 @@
+import type { Database, Field, Metadata } from "@proofkit/fmodata";
+import { isFMODataError, isODataError } from "@proofkit/fmodata";
import type { DBFieldAttribute } from "better-auth/db";
import chalk from "chalk";
-import type { Metadata } from "fm-odata-client";
-import z from "zod/v4";
-import type { createRawFetch } from "./odata";
/** Schema type returned by better-auth's getSchema function */
type BetterAuthSchema = Record<string, { fields: Record<string, DBFieldAttribute>; order: number }>;
@@ -17,34 +16,29 @@ function normalizeBetterAuthFieldType(fieldType: unknown): string {
return String(fieldType);
}
-export async function getMetadata(fetch: ReturnType<typeof createRawFetch>["fetch"], databaseName: string) {
- console.log("getting metadata...");
- const result = await fetch("/$metadata", {
- method: "GET",
- headers: { accept: "application/json" },
- output: z
- .looseObject({
- $Version: z.string(),
- "@ServerVersion": z.string(),
- })
- .or(z.null())
- .catch(null),
- });
-
- if (result.error) {
- console.error("Failed to get metadata:", result.error);
+export async function getMetadata(db: Database): Promise<Metadata | null> {
+ try {
+ const metadata = await db.getMetadata({ format: "json" });
+ return metadata;
+ } catch (err) {
+ console.error(chalk.red("Failed to get metadata:"), formatError(err));
return null;
}
+}
- return (result.data?.[databaseName] ?? null) as Metadata | null;
+/** Map a better-auth field type string to an fmodata Field type */
+function mapFieldType(t: string): "string" | "numeric" | "timestamp" {
+ if (t.includes("boolean") || t.includes("number")) {
+ return "numeric";
+ }
+ if (t.includes("date")) {
+ return "timestamp";
+ }
+ return "string";
}
-export async function planMigration(
-  fetch: ReturnType<typeof createRawFetch>["fetch"],
-  betterAuthSchema: BetterAuthSchema,
-  databaseName: string,
-): Promise<MigrationPlan> {
- const metadata = await getMetadata(fetch, databaseName);
+export async function planMigration(db: Database, betterAuthSchema: BetterAuthSchema): Promise<MigrationPlan> {
+ const metadata = await getMetadata(db);
// Build a map from entity set name to entity type key
  const entitySetToType: Record<string, string> = {};
@@ -71,9 +65,9 @@ export async function planMigration(
typeof fieldValue === "object" && fieldValue !== null && "$Type" in fieldValue,
)
.map(([fieldKey, fieldValue]) => {
- let type = "varchar";
+ let type = "string";
if (fieldValue.$Type === "Edm.String") {
- type = "varchar";
+ type = "string";
} else if (fieldValue.$Type === "Edm.DateTimeOffset") {
type = "timestamp";
} else if (
@@ -99,30 +93,21 @@ export async function planMigration(
.sort((a, b) => (a[1].order ?? 0) - (b[1].order ?? 0))
.map(([key, value]) => ({
...value,
- modelName: key, // Use the key as modelName since getSchema uses table names as keys
+ modelName: key,
}));
const migrationPlan: MigrationPlan = [];
for (const baTable of baTables) {
const fields: FmField[] = Object.entries(baTable.fields).map(([key, field]) => {
- // Better Auth's FieldType can be a string literal union or arrays.
- // Normalize it to a string so our FM mapping logic remains stable.
- // Use .includes() for all checks to handle array types like ["boolean", "null"] → "boolean|null"
const t = normalizeBetterAuthFieldType(field.type);
- let type: "varchar" | "numeric" | "timestamp" = "varchar";
- if (t.includes("boolean") || t.includes("number")) {
- type = "numeric";
- } else if (t.includes("date")) {
- type = "timestamp";
- }
+ const type = mapFieldType(t);
return {
name: field.fieldName ?? key,
type,
};
});
- // get existing table or create it
const tableExists = baTable.modelName in existingTables;
if (tableExists) {
@@ -134,7 +119,6 @@ export async function planMigration(
},
        {} as Record<string, string>,
);
- // Warn about type mismatches (optional, not in plan)
for (const field of fields) {
if (existingFields.includes(field.name) && existingFieldMap[field.name] !== field.type) {
console.warn(
@@ -157,7 +141,7 @@ export async function planMigration(
fields: [
{
name: "id",
- type: "varchar",
+ type: "string",
primary: true,
unique: true,
},
@@ -170,106 +154,97 @@ export async function planMigration(
return migrationPlan;
}
-export async function executeMigration(
-  fetch: ReturnType<typeof createRawFetch>["fetch"],
- migrationPlan: MigrationPlan,
-) {
+export async function executeMigration(db: Database, migrationPlan: MigrationPlan) {
for (const step of migrationPlan) {
+ // Convert plan fields to fmodata Field type
+ const fmodataFields: Field[] = step.fields.map((f) => ({
+ name: f.name,
+ type: f.type,
+ ...(f.primary ? { primary: true } : {}),
+ ...(f.unique ? { unique: true } : {}),
+ }));
+
if (step.operation === "create") {
console.log("Creating table:", step.tableName);
- const result = await fetch("/FileMaker_Tables", {
- method: "POST",
- body: {
- tableName: step.tableName,
- fields: step.fields,
- },
- });
-
- if (result.error) {
- console.error(`Failed to create table ${step.tableName}:`, result.error);
- throw new Error(`Migration failed: ${result.error}`);
+ try {
+ await db.schema.createTable(step.tableName, fmodataFields);
+ } catch (error) {
+ throw migrationError("create", step.tableName, error);
}
} else if (step.operation === "update") {
console.log("Adding fields to table:", step.tableName);
- const result = await fetch(`/FileMaker_Tables/${step.tableName}`, {
- method: "PATCH",
- body: { fields: step.fields },
- });
-
- if (result.error) {
- console.error(`Failed to update table ${step.tableName}:`, result.error);
- throw new Error(`Migration failed: ${result.error}`);
+ try {
+ await db.schema.addFields(step.tableName, fmodataFields);
+ } catch (error) {
+ throw migrationError("update", step.tableName, error);
}
}
}
}
-const genericFieldSchema = z.object({
- name: z.string(),
- nullable: z.boolean().optional(),
- primary: z.boolean().optional(),
- unique: z.boolean().optional(),
- global: z.boolean().optional(),
- repetitions: z.number().optional(),
-});
-
-const stringFieldSchema = genericFieldSchema.extend({
- type: z.literal("varchar"),
- maxLength: z.number().optional(),
- default: z.enum(["USER", "USERNAME", "CURRENT_USER"]).optional(),
-});
-
-const numericFieldSchema = genericFieldSchema.extend({
- type: z.literal("numeric"),
-});
-
-const dateFieldSchema = genericFieldSchema.extend({
- type: z.literal("date"),
- default: z.enum(["CURRENT_DATE", "CURDATE"]).optional(),
-});
-
-const timeFieldSchema = genericFieldSchema.extend({
- type: z.literal("time"),
- default: z.enum(["CURRENT_TIME", "CURTIME"]).optional(),
-});
-
-const timestampFieldSchema = genericFieldSchema.extend({
- type: z.literal("timestamp"),
- default: z.enum(["CURRENT_TIMESTAMP", "CURTIMESTAMP"]).optional(),
-});
+interface FmField {
+ name: string;
+ type: "string" | "numeric" | "timestamp";
+ primary?: boolean;
+ unique?: boolean;
+}
-const containerFieldSchema = genericFieldSchema.extend({
- type: z.literal("container"),
- externalSecurePath: z.string().optional(),
-});
+const migrationStepTypes = ["create", "update"] as const;
+interface MigrationStep {
+ tableName: string;
+ operation: (typeof migrationStepTypes)[number];
+ fields: FmField[];
+}
-const fieldSchema = z.discriminatedUnion("type", [
- stringFieldSchema,
- numericFieldSchema,
- dateFieldSchema,
- timeFieldSchema,
- timestampFieldSchema,
- containerFieldSchema,
-]);
+export type MigrationPlan = MigrationStep[];
-type FmField = z.infer<typeof fieldSchema>;
+function formatError(error: unknown): string {
+ if (isODataError(error)) {
+ const code = error.code ? ` (${error.code})` : "";
+ return `${error.message}${code}`;
+ }
+ if (isFMODataError(error)) {
+ return error.message;
+ }
+ if (error instanceof Error) {
+ return error.message;
+ }
+ return String(error);
+}
-const migrationPlanSchema = z
- .object({
- tableName: z.string(),
- operation: z.enum(["create", "update"]),
- fields: z.array(fieldSchema),
- })
- .array();
+function migrationError(operation: string, tableName: string, error: unknown): Error {
+ const action = operation === "create" ? "create table" : "update table";
+ const base = `Failed to ${action} "${tableName}"`;
-export type MigrationPlan = z.infer<typeof migrationPlanSchema>;
+ if (isODataError(error) && error.code === "207") {
+ console.error(
+ chalk.red(`\n${base}: Cannot modify schema.`),
+ chalk.yellow("\nThe account used does not have schema modification privileges."),
+ chalk.gray(
+ "\nUse --username and --password to provide Full Access credentials, or grant schema modification privileges to the current account.",
+ ),
+ );
+ } else {
+ console.error(chalk.red(`\n${base}:`), formatError(error));
+ }
+ return new Error(`Migration failed: ${formatError(error)}`);
+}
-export function prettyPrintMigrationPlan(migrationPlan: MigrationPlan) {
+export function prettyPrintMigrationPlan(
+ migrationPlan: MigrationPlan,
+ target?: { serverUrl?: string; fileName?: string },
+) {
if (!migrationPlan.length) {
console.log("No changes to apply. Database is up to date.");
return;
}
console.log(chalk.bold.green("Migration plan:"));
+ if (target?.serverUrl || target?.fileName) {
+ const parts: string[] = [];
+ if (target.fileName) parts.push(chalk.cyan(target.fileName));
+ if (target.serverUrl) parts.push(chalk.gray(target.serverUrl));
+ console.log(` Target: ${parts.join(" @ ")}`);
+ }
for (const step of migrationPlan) {
const emoji = step.operation === "create" ? "✅" : "✏️";
console.log(
diff --git a/packages/better-auth/src/odata/index.ts b/packages/better-auth/src/odata/index.ts
deleted file mode 100644
index eda9ce9b..00000000
--- a/packages/better-auth/src/odata/index.ts
+++ /dev/null
@@ -1,219 +0,0 @@
-/** biome-ignore-all lint/suspicious/noExplicitAny: library code */
-import { logger as betterAuthLogger } from "better-auth";
-import { err, ok, type Result } from "neverthrow";
-import type { z } from "zod/v4";
-
-interface BasicAuthCredentials {
- username: string;
- password: string;
-}
-interface OttoAPIKeyAuth {
- apiKey: string;
-}
-type ODataAuth = BasicAuthCredentials | OttoAPIKeyAuth;
-
-export interface FmOdataConfig {
- serverUrl: string;
- auth: ODataAuth;
- database: string;
- logging?: true | "verbose" | "none";
-}
-
-export function validateUrl(input: string): Result {
- try {
- const url = new URL(input);
- return ok(url);
- } catch (error) {
- return err(error);
- }
-}
-
-export function createRawFetch(args: FmOdataConfig) {
- const result = validateUrl(args.serverUrl);
-
- if (result.isErr()) {
- throw new Error("Invalid server URL");
- }
-
- let baseURL = result.value.origin;
- if ("apiKey" in args.auth) {
- baseURL += "/otto";
- }
- baseURL += `/fmi/odata/v4/${args.database}`;
-
- // Create authentication headers
-  const authHeaders: Record<string, string> = {};
- if ("apiKey" in args.auth) {
- authHeaders.Authorization = `Bearer ${args.auth.apiKey}`;
- } else {
- const credentials = btoa(`${args.auth.username}:${args.auth.password}`);
- authHeaders.Authorization = `Basic ${credentials}`;
- }
-
- // Enhanced fetch function with body handling, validation, and structured responses
-  const wrappedFetch = async <TOutput = unknown>(
-    input: string | URL | Request,
-    options?: Omit<RequestInit, "body"> & {
-      body?: any; // Allow any type for body
-      output?: z.ZodSchema<TOutput>; // Optional schema for validation
-    },
-  ): Promise<{ data?: TOutput; error?: string; response?: Response }> => {
- try {
- let url: string;
-
- // Handle different input types
- if (typeof input === "string") {
- // If it's already a full URL, use as-is, otherwise prepend baseURL
- url = input.startsWith("http") ? input : `${baseURL}${input.startsWith("/") ? input : `/${input}`}`;
- } else if (input instanceof URL) {
- url = input.toString();
- } else if (input instanceof Request) {
- url = input.url;
- } else {
- url = String(input);
- }
-
- // Handle body serialization
- let processedBody = options?.body;
- if (
- processedBody &&
- typeof processedBody === "object" &&
- !(processedBody instanceof FormData) &&
- !(processedBody instanceof URLSearchParams) &&
- !(processedBody instanceof ReadableStream)
- ) {
- processedBody = JSON.stringify(processedBody);
- }
-
- // Merge headers
- const headers = {
- "Content-Type": "application/json",
- ...authHeaders,
- ...(options?.headers || {}),
- };
-
- const requestInit: RequestInit = {
- ...options,
- headers,
- body: processedBody,
- };
-
- // Optional logging
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", `${requestInit.method || "GET"} ${url}`);
- if (requestInit.body) {
- betterAuthLogger.info("raw-fetch", "Request body:", requestInit.body);
- }
- }
-
- const response = await fetch(url, requestInit);
-
- // Optional logging for response details
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", `Response status: ${response.status} ${response.statusText}`);
- betterAuthLogger.info("raw-fetch", "Response headers:", Object.fromEntries(response.headers.entries()));
- }
-
- // Check if response is ok
- if (!response.ok) {
- const errorText = await response.text().catch(() => "Unknown error");
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.error("raw-fetch", `HTTP Error ${response.status}: ${errorText}`);
- }
- return {
- error: `HTTP ${response.status}: ${errorText}`,
- response,
- };
- }
-
- // Parse response based on content type
- let responseData: any;
- const contentType = response.headers.get("content-type");
-
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", `Response content-type: ${contentType || "none"}`);
- }
-
- if (contentType?.includes("application/json")) {
- try {
- const responseText = await response.text();
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", `Raw response text: "${responseText}"`);
- betterAuthLogger.info("raw-fetch", `Response text length: ${responseText.length}`);
- }
-
- // Handle empty responses
- if (responseText.trim() === "") {
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", "Empty JSON response, returning null");
- }
- responseData = null;
- } else {
- responseData = JSON.parse(responseText);
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", "Successfully parsed JSON response");
- }
- }
- } catch (parseError) {
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.error("raw-fetch", "JSON parse error:", parseError);
- }
- return {
- error: `Failed to parse JSON response: ${parseError instanceof Error ? parseError.message : "Unknown parse error"}`,
- response,
- };
- }
- } else if (contentType?.includes("text/")) {
- // Handle text responses (text/plain, text/html, etc.)
- responseData = await response.text();
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", `Text response: "${responseData}"`);
- }
- } else {
- // For other content types, try to get text but don't fail if it's binary
- try {
- responseData = await response.text();
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", `Unknown content-type response as text: "${responseData}"`);
- }
- } catch {
- // If text parsing fails (e.g., binary data), return null
- responseData = null;
- if (args.logging === "verbose" || args.logging === true) {
- betterAuthLogger.info("raw-fetch", "Could not parse response as text, returning null");
- }
- }
- }
-
- // Validate output if schema provided
- if (options?.output) {
- const validation = options.output.safeParse(responseData);
- if (validation.success) {
- return {
- data: validation.data,
- response,
- };
- }
- return {
- error: `Validation failed: ${validation.error.message}`,
- response,
- };
- }
-
- // Return unvalidated data
- return {
- data: responseData as TOutput,
- response,
- };
- } catch (error) {
- return {
- error: error instanceof Error ? error.message : "Unknown error occurred",
- };
- }
- };
-
- return {
- baseURL,
- fetch: wrappedFetch,
- };
-}
diff --git a/packages/better-auth/tests/adapter.test.ts b/packages/better-auth/tests/adapter.test.ts
index 0f803ffb..af718e9e 100644
--- a/packages/better-auth/tests/adapter.test.ts
+++ b/packages/better-auth/tests/adapter.test.ts
@@ -2,32 +2,25 @@
* Unit tests for FileMaker adapter operations using mocked responses.
* These tests verify adapter behavior without requiring a live FileMaker server.
*/
-import { afterEach, describe, expect, it, vi } from "vitest";
+import type { Database } from "@proofkit/fmodata";
+import { describe, expect, it } from "vitest";
import { FileMakerAdapter } from "../src/adapter";
import { mockResponses } from "./fixtures/responses";
-import { createMockFetch, createMockFetchSequence } from "./utils/mock-fetch";
+import { createMockDatabase, createMockDatabaseSequence } from "./utils/mock-fetch";
-// Test adapter factory - creates adapter with test config
-function createTestAdapter() {
+// Test adapter factory - creates adapter with mock database
+function createTestAdapter(mockDb: unknown) {
return FileMakerAdapter({
- odata: {
- serverUrl: "https://api.example.com",
- auth: { apiKey: "test-api-key" },
- database: "test.fmp12",
- },
+ database: mockDb as Database,
debugLogs: false,
});
}
describe("FileMakerAdapter", () => {
- afterEach(() => {
- vi.unstubAllGlobals();
- });
-
describe("create", () => {
it("should create a record and return data with id", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["create-user"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["create-user"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.create({
model: "user",
@@ -43,8 +36,8 @@ describe("FileMakerAdapter", () => {
});
it("should create a session record", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["create-session"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["create-session"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.create({
model: "session",
@@ -62,8 +55,8 @@ describe("FileMakerAdapter", () => {
describe("findOne", () => {
it("should find a single record", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["find-one-user"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["find-one-user"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.findOne({
model: "user",
@@ -76,8 +69,8 @@ describe("FileMakerAdapter", () => {
});
it("should return null when no record found", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["find-one-user-not-found"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["find-one-user-not-found"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.findOne({
model: "user",
@@ -90,8 +83,8 @@ describe("FileMakerAdapter", () => {
describe("findMany", () => {
it("should find multiple records", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["find-many-users"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["find-many-users"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.findMany({
model: "user",
@@ -102,8 +95,8 @@ describe("FileMakerAdapter", () => {
});
it("should return empty array when no records found", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["find-many-users-empty"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["find-many-users-empty"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.findMany({
model: "user",
@@ -114,8 +107,8 @@ describe("FileMakerAdapter", () => {
});
it("should apply limit", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["find-many-with-limit"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["find-many-with-limit"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.findMany({
model: "user",
@@ -126,8 +119,8 @@ describe("FileMakerAdapter", () => {
});
it("should apply sort", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["find-many-sorted-desc"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["find-many-sorted-desc"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.findMany({
model: "user",
@@ -144,8 +137,8 @@ describe("FileMakerAdapter", () => {
describe("count", () => {
it("should count records", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["count-users"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["count-users"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.count({
model: "user",
@@ -158,15 +151,12 @@ describe("FileMakerAdapter", () => {
describe("update", () => {
it("should update a record and return updated data", async () => {
// Update requires: find record -> patch -> read back
- vi.stubGlobal(
- "fetch",
- createMockFetchSequence([
- mockResponses["update-find-user"],
- mockResponses["update-patch-user"],
- mockResponses["update-read-back-user"],
- ]),
- );
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabaseSequence([
+ mockResponses["update-find-user"],
+ mockResponses["update-patch-user"],
+ mockResponses["update-read-back-user"],
+ ]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.update({
model: "user",
@@ -180,8 +170,8 @@ describe("FileMakerAdapter", () => {
});
it("should return null when record to update not found", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["find-one-user-not-found"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["find-one-user-not-found"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.update({
model: "user",
@@ -196,11 +186,8 @@ describe("FileMakerAdapter", () => {
describe("delete", () => {
it("should delete a record", async () => {
// Delete requires: find record -> delete
- vi.stubGlobal(
- "fetch",
- createMockFetchSequence([mockResponses["delete-find-user"], mockResponses["delete-user"]]),
- );
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabaseSequence([mockResponses["delete-find-user"], mockResponses["delete-user"]]);
+ const adapter = createTestAdapter(mockDb)({});
// Should not throw
await adapter.delete({
@@ -210,8 +197,8 @@ describe("FileMakerAdapter", () => {
});
it("should do nothing when record to delete not found", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["delete-find-not-found"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["delete-find-not-found"]);
+ const adapter = createTestAdapter(mockDb)({});
// Should not throw
await adapter.delete({
@@ -224,15 +211,12 @@ describe("FileMakerAdapter", () => {
describe("deleteMany", () => {
it("should delete multiple records", async () => {
// DeleteMany requires: find all -> delete each
- vi.stubGlobal(
- "fetch",
- createMockFetchSequence([
- mockResponses["delete-many-find-users"],
- mockResponses["delete-user-123"],
- mockResponses["delete-user-456"],
- ]),
- );
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabaseSequence([
+ mockResponses["delete-many-find-users"],
+ mockResponses["delete-user-123"],
+ mockResponses["delete-user-456"],
+ ]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.deleteMany({
model: "user",
@@ -243,8 +227,8 @@ describe("FileMakerAdapter", () => {
});
it("should return 0 when no records to delete", async () => {
- vi.stubGlobal("fetch", createMockFetch(mockResponses["delete-find-not-found"]));
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabase(mockResponses["delete-find-not-found"]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.deleteMany({
model: "user",
@@ -258,15 +242,12 @@ describe("FileMakerAdapter", () => {
describe("updateMany", () => {
it("should update multiple records", async () => {
// UpdateMany requires: find all -> patch each
- vi.stubGlobal(
- "fetch",
- createMockFetchSequence([
- mockResponses["delete-many-find-users"], // reuse the find response
- mockResponses["update-patch-user"],
- mockResponses["update-patch-user"],
- ]),
- );
- const adapter = createTestAdapter()({});
+ const mockDb = createMockDatabaseSequence([
+ mockResponses["delete-many-find-users"], // reuse the find response
+ mockResponses["update-patch-user"],
+ mockResponses["update-patch-user"],
+ ]);
+ const adapter = createTestAdapter(mockDb)({});
const result = await adapter.updateMany({
model: "user",
@@ -280,27 +261,11 @@ describe("FileMakerAdapter", () => {
});
describe("FileMakerAdapter configuration", () => {
- it("should throw on invalid config", () => {
- expect(() =>
- FileMakerAdapter({
- odata: {
- serverUrl: "not-a-url",
- auth: { apiKey: "test" },
- database: "test.fmp12",
- },
- }),
- ).toThrow();
+ it("should throw on missing database", () => {
+ expect(() => FileMakerAdapter({} as any)).toThrow();
});
- it("should throw when database lacks .fmp12 extension", () => {
- expect(() =>
- FileMakerAdapter({
- odata: {
- serverUrl: "https://api.example.com",
- auth: { apiKey: "test" },
- database: "test",
- },
- }),
- ).toThrow();
+ it("should throw on null database", () => {
+ expect(() => FileMakerAdapter({ database: null } as any)).toThrow();
});
});
diff --git a/packages/better-auth/tests/e2e/adapter.test.ts b/packages/better-auth/tests/e2e/adapter.test.ts
index cd381ac1..60d8c681 100644
--- a/packages/better-auth/tests/e2e/adapter.test.ts
+++ b/packages/better-auth/tests/e2e/adapter.test.ts
@@ -1,8 +1,7 @@
+import { FMServerConnection } from "@proofkit/fmodata";
import { runAdapterTest } from "better-auth/adapters/test";
import { beforeAll, describe, expect, it } from "vitest";
-import { z } from "zod/v4";
import { FileMakerAdapter } from "../../src";
-import { createRawFetch } from "../../src/odata";
if (!process.env.FM_SERVER) {
throw new Error("FM_SERVER is not set");
@@ -17,23 +16,20 @@ if (!process.env.FM_PASSWORD) {
throw new Error("FM_PASSWORD is not set");
}
-const { fetch } = createRawFetch({
+const connection = new FMServerConnection({
serverUrl: process.env.FM_SERVER,
auth: {
username: process.env.FM_USERNAME,
password: process.env.FM_PASSWORD,
},
- database: process.env.FM_DATABASE,
- logging: "verbose", // Enable verbose logging to see the response details
});
+const db = connection.database(process.env.FM_DATABASE);
describe("My Adapter Tests", async () => {
beforeAll(async () => {
// reset the database
for (const table of ["user", "session", "account", "verification"]) {
- const result = await fetch(`/${table}`, {
- output: z.object({ value: z.array(z.any()) }),
- });
+ const result = await db._makeRequest<{ value: { id: string }[] }>(`/${table}`);
if (result.error) {
console.log("Error fetching records:", result.error);
@@ -42,7 +38,7 @@ describe("My Adapter Tests", async () => {
const records = result.data?.value || [];
for (const record of records) {
- const deleteResult = await fetch(`/${table}('${record.id}')`, {
+ const deleteResult = await db._makeRequest(`/${table}('${record.id}')`, {
method: "DELETE",
});
@@ -68,16 +64,9 @@ describe("My Adapter Tests", async () => {
const adapter = FileMakerAdapter({
debugLogs: {
- isRunningAdapterTests: true, // This is our super secret flag to let us know to only log debug logs if a test fails.
- },
- odata: {
- auth: {
- username: process.env.FM_USERNAME,
- password: process.env.FM_PASSWORD,
- },
- database: process.env.FM_DATABASE,
- serverUrl: process.env.FM_SERVER,
+ isRunningAdapterTests: true,
},
+ database: db,
});
await runAdapterTest({
@@ -102,14 +91,12 @@ describe("My Adapter Tests", async () => {
});
console.log(result);
-
- // expect(result.data).toHaveLength(1);
});
});
it("should properly filter by dates", async () => {
- // delete all users - using buildQuery to construct the filter properly
- const deleteAllResult = await fetch(`/user?$filter="id" ne '0'`, {
+ // delete all users
+ const deleteAllResult = await db._makeRequest(`/user?$filter="id" ne '0'`, {
method: "DELETE",
});
@@ -119,23 +106,19 @@ it("should properly filter by dates", async () => {
// create user
const date = new Date("2025-01-10").toISOString();
- const createResult = await fetch("/user", {
+ const createResult = await db._makeRequest<{ id: string }>("/user", {
method: "POST",
- body: {
+ body: JSON.stringify({
id: "filter-test",
createdAt: date,
- },
- output: z.object({ id: z.string() }),
+ }),
});
if (createResult.error) {
throw new Error(`Failed to create user: ${createResult.error}`);
}
- const result = await fetch("/user?$filter=createdAt ge 2025-01-05", {
- method: "GET",
- output: z.object({ value: z.array(z.any()) }),
- });
+ const result = await db._makeRequest<{ value: unknown[] }>("/user?$filter=createdAt ge 2025-01-05");
console.log(result);
@@ -146,7 +129,7 @@ it("should properly filter by dates", async () => {
expect(result.data?.value).toHaveLength(1);
// delete record
- const deleteResult = await fetch(`/user('filter-test')`, {
+ const deleteResult = await db._makeRequest(`/user('filter-test')`, {
method: "DELETE",
});
diff --git a/packages/better-auth/tests/e2e/migrate.test.ts b/packages/better-auth/tests/e2e/migrate.test.ts
index e37c382e..ea0985d4 100644
--- a/packages/better-auth/tests/e2e/migrate.test.ts
+++ b/packages/better-auth/tests/e2e/migrate.test.ts
@@ -1,6 +1,6 @@
+import { FMServerConnection } from "@proofkit/fmodata";
import { describe, expect, it } from "vitest";
-import { getMetadata } from "../src/migrate";
-import { createRawFetch } from "../src/odata";
+import { getMetadata } from "../../src/migrate";
function getTestEnv() {
const fmServer = process.env.FM_SERVER;
@@ -22,18 +22,17 @@ function getTestEnv() {
const { fmServer, fmDatabase, ottoApiKey } = getTestEnv();
-const { fetch } = createRawFetch({
+const connection = new FMServerConnection({
serverUrl: fmServer,
auth: {
apiKey: ottoApiKey,
},
- database: fmDatabase,
- logging: "verbose",
});
+const db = connection.database(fmDatabase);
describe("migrate", () => {
it("should get back metadata in JSON format", async () => {
- const metadata = await getMetadata(fetch, fmDatabase);
+ const metadata = await getMetadata(db);
expect(metadata).toBeDefined();
expect(typeof metadata).toBe("object");
});
@@ -42,63 +41,33 @@ describe("migrate", () => {
const tableName = "test_table";
// Delete table if it exists (cleanup)
- const _deleteResult = await fetch(`/FileMaker_Tables/${tableName}`, {
- method: "DELETE",
- });
- // Don't throw on delete errors as table might not exist
+ try {
+ await db.schema.deleteTable(tableName);
+ } catch {
+ // Table might not exist
+ }
// Create table
- const createResult = await fetch("/FileMaker_Tables", {
- method: "POST",
- body: {
- tableName,
- fields: [
- {
- name: "Company ID",
- type: "varchar",
- primary: true,
- },
- ],
+ await db.schema.createTable(tableName, [
+ {
+ name: "Company ID",
+ type: "string",
+ primary: true,
},
- });
-
- if (createResult.error) {
- throw new Error(`Failed to create table: ${createResult.error}`);
- }
+ ]);
// Add field to table
- const updateResult = await fetch(`/FileMaker_Tables/${tableName}`, {
- method: "PATCH",
- body: {
- fields: [
- {
- name: "Phone",
- type: "varchar",
- },
- ],
+ await db.schema.addFields(tableName, [
+ {
+ name: "Phone",
+ type: "string",
},
- });
-
- if (updateResult.error) {
- throw new Error(`Failed to update table: ${updateResult.error}`);
- }
+ ]);
// Delete field from table
- const deleteFieldResult = await fetch(`/FileMaker_Tables/${tableName}/Phone`, {
- method: "DELETE",
- });
-
- if (deleteFieldResult.error) {
- throw new Error(`Failed to delete field: ${deleteFieldResult.error}`);
- }
+ await db.schema.deleteField(tableName, "Phone");
// Delete table
- const deleteTableResult = await fetch(`/FileMaker_Tables/${tableName}`, {
- method: "DELETE",
- });
-
- if (deleteTableResult.error) {
- throw new Error(`Failed to delete table: ${deleteTableResult.error}`);
- }
+ await db.schema.deleteTable(tableName);
});
});
diff --git a/packages/better-auth/tests/parseWhere.test.ts b/packages/better-auth/tests/parseWhere.test.ts
index dc46c7d5..474bc9df 100644
--- a/packages/better-auth/tests/parseWhere.test.ts
+++ b/packages/better-auth/tests/parseWhere.test.ts
@@ -1,7 +1,6 @@
import type { CleanedWhere } from "better-auth/adapters";
import { describe, expect, test } from "vitest";
import { parseWhere } from "../src/adapter";
-import { validateUrl } from "../src/odata";
describe("parseWhere", () => {
test("should return empty string for empty where clause", () => {
@@ -208,24 +207,3 @@ describe("parseWhere", () => {
}
});
});
-
-describe("utils: validateUrl", () => {
- const validUrls = [
- "https://example.com",
- "https://example.com/path",
- "https://example.com/path?query=value",
- "https://example.com/path?query=value#fragment",
- "https://acme-dev.ottomatic.cloud",
- ];
- test("should validate a valid URL", () => {
- for (const url of validUrls) {
- const result = validateUrl(url);
- expect(result.isOk()).toBe(true);
- }
- });
-
- test("should return an error for an invalid URL", () => {
- const result = validateUrl("not-a-url");
- expect(result.isErr()).toBe(true);
- });
-});
diff --git a/packages/better-auth/tests/utils/mock-fetch.ts b/packages/better-auth/tests/utils/mock-fetch.ts
index 4d86fbb3..f9a2605f 100644
--- a/packages/better-auth/tests/utils/mock-fetch.ts
+++ b/packages/better-auth/tests/utils/mock-fetch.ts
@@ -1,108 +1,81 @@
/**
- * Mock Fetch Utility for OData API
+ * Mock Database Utility for OData API
*
- * Creates a mock fetch function that returns pre-recorded OData API responses.
- * Designed to be used with vitest's vi.stubGlobal to mock the global fetch.
- *
- * Usage:
- * ```ts
- * import { vi } from 'vitest';
- * import { createMockFetch, createMockFetchSequence } from './tests/utils/mock-fetch';
- * import { mockResponses } from './tests/fixtures/responses';
- *
- * // Mock a single response
- * vi.stubGlobal('fetch', createMockFetch(mockResponses['find-one-user']));
- *
- * // Mock a sequence of responses (for multi-call tests)
- * vi.stubGlobal('fetch', createMockFetchSequence([
- * mockResponses['find-one-user'],
- * mockResponses['update-user'],
- * ]));
- * ```
+ * Creates a mock Database object with _makeRequest that returns pre-recorded OData API responses.
+ * Matches requests by URL path and returns the corresponding fixture response.
*/
import type { MockResponse } from "../fixtures/responses";
-/**
- * Creates a mock fetch function that returns the provided response
- */
-export function createMockFetch(response: MockResponse): typeof fetch {
- return (_input: RequestInfo | URL, _init?: RequestInit): Promise<Response> => {
- const contentType = response.headers?.["content-type"] || "application/json";
- const isJson = contentType.includes("application/json");
+type MakeRequestResult<T> = { data: T; error: undefined } | { data: undefined; error: Error };
- const headers = new Headers({
- "content-type": contentType,
- });
+interface MockDatabase {
+ _makeRequest<T>(path: string, options?: RequestInit): Promise<MakeRequestResult<T>>;
+ // Stub properties for type compatibility
+ schema: {
+ createTable: () => Promise<unknown>;
+ addFields: () => Promise<unknown>;
+ };
+ getMetadata: () => Promise<unknown>;
+ _getDatabaseName: string;
+}
- if (response.headers) {
- for (const [key, value] of Object.entries(response.headers)) {
- if (key !== "content-type" && value) {
- headers.set(key, value);
- }
+/**
+ * Creates a mock Database that returns the provided response for any _makeRequest call
+ */
+export function createMockDatabase(response: MockResponse): MockDatabase {
+ return {
+ _makeRequest: <T>(): Promise<MakeRequestResult<T>> => {
+ if (response.status >= 200 && response.status < 300) {
+ return Promise.resolve({ data: response.response as T, error: undefined });
}
- }
-
- const responseBody = isJson ? JSON.stringify(response.response) : String(response.response);
-
- return Promise.resolve(
- new Response(responseBody, {
- status: response.status,
- statusText: response.status >= 200 && response.status < 300 ? "OK" : "Error",
- headers,
- }),
- );
+ return Promise.resolve({ data: undefined, error: new Error(`HTTP ${response.status}`) });
+ },
+ schema: {
+ createTable: async () => ({}),
+ addFields: async () => ({}),
+ },
+ getMetadata: async () => ({}),
+ _getDatabaseName: "test.fmp12",
};
}
/**
- * Creates a mock fetch function that returns responses in sequence
+ * Creates a mock Database that returns responses in sequence
* Useful for tests that make multiple API calls
*/
-export function createMockFetchSequence(responses: MockResponse[]): typeof fetch {
+export function createMockDatabaseSequence(responses: MockResponse[]): MockDatabase {
let callIndex = 0;
- return (_input: RequestInfo | URL, _init?: RequestInit): Promise<Response> => {
- const response = responses[callIndex];
- if (!response) {
- throw new Error(
- `Mock fetch called more times than expected. Call #${callIndex + 1}, but only ${responses.length} responses provided.`,
- );
- }
- callIndex++;
-
- const contentType = response.headers?.["content-type"] || "application/json";
- const isJson = contentType.includes("application/json");
-
- const headers = new Headers({
- "content-type": contentType,
- });
-
- if (response.headers) {
- for (const [key, value] of Object.entries(response.headers)) {
- if (key !== "content-type" && value) {
- headers.set(key, value);
- }
+ return {
+ _makeRequest: <T>(): Promise<MakeRequestResult<T>> => {
+ const response = responses[callIndex];
+ if (!response) {
+ throw new Error(
+ `Mock _makeRequest called more times than expected. Call #${callIndex + 1}, but only ${responses.length} responses provided.`,
+ );
}
- }
+ callIndex++;
- const responseBody = isJson ? JSON.stringify(response.response) : String(response.response);
-
- return Promise.resolve(
- new Response(responseBody, {
- status: response.status,
- statusText: response.status >= 200 && response.status < 300 ? "OK" : "Error",
- headers,
- }),
- );
+ if (response.status >= 200 && response.status < 300) {
+ return Promise.resolve({ data: response.response as T, error: undefined });
+ }
+ return Promise.resolve({ data: undefined, error: new Error(`HTTP ${response.status}`) });
+ },
+ schema: {
+ createTable: async () => ({}),
+ addFields: async () => ({}),
+ },
+ getMetadata: async () => ({}),
+ _getDatabaseName: "test.fmp12",
};
}
/**
- * Helper to create a simple OData success response
+ * Helper to create a mock Database with a simple OData success response
*/
-export function createODataSuccessMock(value: unknown[]): typeof fetch {
- return createMockFetch({
+export function createODataSuccessMock(value: unknown[]): MockDatabase {
+ return createMockDatabase({
url: "https://api.example.com/mock",
method: "GET",
status: 200,
@@ -112,10 +85,10 @@ export function createODataSuccessMock(value: unknown[]): typeof fetch {
}
/**
- * Helper to create an OData error response
+ * Helper to create a mock Database with an OData error response
*/
-export function createODataErrorMock(statusCode: number, message: string): typeof fetch {
- return createMockFetch({
+export function createODataErrorMock(statusCode: number, message: string): MockDatabase {
+ return createMockDatabase({
url: "https://api.example.com/mock",
method: "GET",
status: statusCode,
diff --git a/packages/better-auth/tsconfig.json b/packages/better-auth/tsconfig.json
index ffbd0241..2973845f 100644
--- a/packages/better-auth/tsconfig.json
+++ b/packages/better-auth/tsconfig.json
@@ -2,6 +2,8 @@
"extends": "../../tsconfig.json",
"compilerOptions": {
/* If transpiling with TypeScript: */
+ "target": "ES2022",
+ "lib": ["DOM", "DOM.Iterable", "ES2022"],
"module": "ESNext",
"moduleResolution": "Bundler",
"outDir": "dist",
diff --git a/packages/fmodata/src/client/database.ts b/packages/fmodata/src/client/database.ts
index c0049933..c574a830 100644
--- a/packages/fmodata/src/client/database.ts
+++ b/packages/fmodata/src/client/database.ts
@@ -1,6 +1,7 @@
+import type { FFetchOptions } from "@fetchkit/ffetch";
import type { StandardSchemaV1 } from "@standard-schema/spec";
import { FMTable } from "../orm/table";
-import type { ExecutableBuilder, ExecutionContext, Metadata } from "../types";
+import type { ExecutableBuilder, ExecutionContext, Metadata, Result } from "../types";
import { BatchBuilder } from "./batch-builder";
import { EntitySet } from "./entity-set";
import { SchemaManager } from "./schema-manager";
@@ -53,6 +54,13 @@ export class Database {
this._includeSpecialColumns = (config?.includeSpecialColumns ?? false) as IncludeSpecialColumns;
}
+ /**
+ * @internal Used by adapter packages to access the database filename.
+ */
+ get _getDatabaseName(): string {
+ return this.databaseName;
+ }
+
/**
* @internal Used by EntitySet to access database configuration
*/
@@ -67,6 +75,14 @@ export class Database {
return this._includeSpecialColumns;
}
+ /**
+ * @internal Used by adapter packages for raw OData requests.
+ * Delegates to the connection's _makeRequest with the database name prepended.
+ */
+ _makeRequest<T = unknown>(path: string, options?: RequestInit & FFetchOptions): Promise<Result<T>> {
+ return this.context._makeRequest(`/${this.databaseName}${path}`, options);
+ }
+
// biome-ignore lint/suspicious/noExplicitAny: Accepts any FMTable configuration
from<T extends FMTable<any>>(table: T): EntitySet<T, IncludeSpecialColumns> {
// Only override database-level useEntityIds if table explicitly sets it
diff --git a/packages/typegen/web/src/components/ui/data-grid.tsx b/packages/typegen/web/src/components/ui/data-grid.tsx
index 2cdaac35..27745832 100644
--- a/packages/typegen/web/src/components/ui/data-grid.tsx
+++ b/packages/typegen/web/src/components/ui/data-grid.tsx
@@ -5,7 +5,7 @@ import { createContext, type ReactNode, useContext } from "react";
import { cn } from "@/lib/utils";
declare module "@tanstack/react-table" {
- interface ColumnMeta {
+ interface ColumnMeta {
headerTitle?: string;
headerClassName?: string;
cellClassName?: string;
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index a185ef77..e5772007 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -196,6 +196,9 @@ importers:
'@commander-js/extra-typings':
specifier: ^14.0.0
version: 14.0.0(commander@14.0.2)
+ '@proofkit/fmodata':
+ specifier: workspace:*
+ version: link:../fmodata
'@tanstack/vite-config':
specifier: ^0.2.1
version: 0.2.1(@types/node@25.0.6)(rollup@4.55.1)(typescript@5.9.3)(vite@6.4.1(@types/node@25.0.6)(jiti@2.6.1)(lightningcss@1.30.2)(tsx@4.21.0)(yaml@2.8.2))
@@ -217,21 +220,12 @@ importers:
fs-extra:
specifier: ^11.3.3
version: 11.3.3
- neverthrow:
- specifier: ^8.2.0
- version: 8.2.0
- odata-query:
- specifier: ^8.0.7
- version: 8.0.7
prompts:
specifier: ^2.4.2
version: 2.4.2
vite:
specifier: ^6.4.1
version: 6.4.1(@types/node@25.0.6)(jiti@2.6.1)(lightningcss@1.30.2)(tsx@4.21.0)(yaml@2.8.2)
- zod:
- specifier: ^4.3.5
- version: 4.3.5
devDependencies:
'@types/fs-extra':
specifier: ^11.0.4
@@ -242,9 +236,6 @@ importers:
'@vitest/ui':
specifier: ^3.2.4
version: 3.2.4(vitest@4.0.17)
- fm-odata-client:
- specifier: ^3.0.2
- version: 3.0.2
publint:
specifier: ^0.3.16
version: 0.3.16
@@ -374,7 +365,7 @@ importers:
version: 11.0.0-rc.441(@trpc/server@11.0.0-rc.441)
'@trpc/next':
specifier: 11.0.0-rc.441
- version: 11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/react-query@11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/server@11.0.0-rc.441)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(@trpc/server@11.0.0-rc.441)(next@16.1.1(@babel/core@7.28.5)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)
+ version: 11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/react-query@11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/server@11.0.0-rc.441)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(@trpc/server@11.0.0-rc.441)(next@16.1.1(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)
'@trpc/react-query':
specifier: 11.0.0-rc.441
version: 11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/server@11.0.0-rc.441)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)
@@ -419,7 +410,7 @@ importers:
version: 16.1.1(@babel/core@7.28.5)(react-dom@19.2.3(react@19.2.3))(react@19.2.3)
next-auth:
specifier: ^4.24.13
- version: 4.24.13(next@16.1.1(@babel/core@7.28.5)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)
+ version: 4.24.13(next@16.1.1(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)
postgres:
specifier: ^3.4.8
version: 3.4.8
@@ -1202,9 +1193,6 @@ packages:
cpu: [x64]
os: [win32]
- '@borewit/text-codec@0.2.1':
- resolution: {integrity: sha512-k7vvKPbf7J2fZ5klGRD9AeKfUvojuZIQ3BT5u7Jfv+puwXkUBUT5PVyMDfJZpy30CBDXGMgw7fguK/lpOMBvgw==}
-
'@braidai/lang@1.1.2':
resolution: {integrity: sha512-qBcknbBufNHlui137Hft8xauQMTZDKdophmLFv05r2eNmdIv/MlPuP4TdUknHG68UdWLgVZwgxVe735HzJNIwA==}
@@ -2393,8 +2381,8 @@ packages:
resolution: {integrity: sha512-a61ljmRVVyG5MC/698C8/FfFDw5a8LOIvyOLW5fztgUXqUpc1jOfQzOitSCbge657OgXXThmY3Tk8fpiDb4UcA==}
engines: {node: '>= 20.0.0'}
- '@oxc-project/types@0.107.0':
- resolution: {integrity: sha512-QFDRbYfV2LVx8tyqtyiah3jQPUj1mK2+RYwxyFWyGoys6XJnwTdlzO6rdNNHOPorHAu5Uo34oWRKcvNpbJarmQ==}
+ '@oxc-project/types@0.111.0':
+ resolution: {integrity: sha512-bh54LJMafgRGl2cPQ/QM+tI5rWaShm/wK9KywEj/w36MhiPKXYM67H2y3q+9pr4YO7ufwg2AKdBAZkhHBD8ClA==}
'@oxc-resolver/binding-android-arm-eabi@11.16.2':
resolution: {integrity: sha512-lVJbvydLQIDZHKUb6Zs9Rq80QVTQ9xdCQE30eC9/cjg4wsMoEOg65QZPymUAIVJotpUAWJD0XYcwE7ugfxx5kQ==}
@@ -3315,79 +3303,79 @@ packages:
peerDependencies:
react: '>=18.2.0'
- '@rolldown/binding-android-arm64@1.0.0-beta.59':
- resolution: {integrity: sha512-6yLLgyswYwiCfls9+hoNFY9F8TQdwo15hpXDHzlAR0X/GojeKF+AuNcXjYNbOJ4zjl/5D6lliE8CbpB5t1OWIQ==}
+ '@rolldown/binding-android-arm64@1.0.0-rc.2':
+ resolution: {integrity: sha512-AGV80viZ4Hil4C16GFH+PSwq10jclV9oyRFhD+5HdowPOCJ+G+99N5AClQvMkUMIahTY8cX0SQpKEEWcCg6fSA==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [android]
- '@rolldown/binding-darwin-arm64@1.0.0-beta.59':
- resolution: {integrity: sha512-hqGXRc162qCCIOAcHN2Cw4eXiVTwYsMFLOhAy1IG2CxY+dwc/l4Ga+dLPkLor3Ikqy5WDn+7kxHbbh6EmshEpQ==}
+ '@rolldown/binding-darwin-arm64@1.0.0-rc.2':
+ resolution: {integrity: sha512-PYR+PQu1mMmQiiKHN2JiOctvH32Xc/Mf+Su2RSmWtC9BbIqlqsVWjbulnShk0imjRim0IsbkMMCN5vYQwiuqaA==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [darwin]
- '@rolldown/binding-darwin-x64@1.0.0-beta.59':
- resolution: {integrity: sha512-ezvvGuhteE15JmMhJW0wS7BaXmhwLy1YHeEwievYaPC1PgGD86wgBKfOpHr9tSKllAXbCe0BeeMvasscWLhKdA==}
+ '@rolldown/binding-darwin-x64@1.0.0-rc.2':
+ resolution: {integrity: sha512-X2G36Z6oh5ynoYpE2JAyG+uQ4kO/3N7XydM/I98FNk8VVgDKjajFF+v7TXJ2FMq6xa7Xm0UIUKHW2MRQroqoUA==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [darwin]
- '@rolldown/binding-freebsd-x64@1.0.0-beta.59':
- resolution: {integrity: sha512-4fhKVJiEYVd5n6no/mrL3LZ9kByfCGwmONOrdtvx8DJGDQhehH/q3RfhG3V/4jGKhpXgbDjpIjkkFdybCTcgew==}
+ '@rolldown/binding-freebsd-x64@1.0.0-rc.2':
+ resolution: {integrity: sha512-XpiFTsl9qjiDfrmJF6CE3dgj1nmSbxUIT+p2HIbXV6WOj/32btO8FKkWSsOphUwVinEt3R8HVkVrcLtFNruMMQ==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [freebsd]
- '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.59':
- resolution: {integrity: sha512-T3Y52sW6JAhvIqArBw+wtjNU1Ieaz4g0NBxyjSJoW971nZJBZygNlSYx78G4cwkCmo1dYTciTPDOnQygLV23pA==}
+ '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.2':
+ resolution: {integrity: sha512-zjYZ99e47Wlygs4hW+sQ+kshlO8ake9OoY2ecnJ9cwpDGiiIB9rQ3LgP3kt8j6IeVyMSksu//VEhc8Mrd1lRIw==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm]
os: [linux]
- '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.59':
- resolution: {integrity: sha512-NIW40jQDSQap2KDdmm9z3B/4OzWJ6trf8dwx3FD74kcQb3v34ThsBFTtzE5KjDuxnxgUlV+DkAu+XgSMKrgufw==}
+ '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.2':
+ resolution: {integrity: sha512-Piso04EZ9IHV1aZSsLQVMOPTiCq4Ps2UPL3pchjNXHGJGFiB9U42s22LubPaEBFS+i6tCawS5EarIwex1zC4BA==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [linux]
- '@rolldown/binding-linux-arm64-musl@1.0.0-beta.59':
- resolution: {integrity: sha512-CCKEk+H+8c0WGe/8n1E20n85Tq4Pv+HNAbjP1KfUXW+01aCWSMjU56ChNrM2tvHnXicfm7QRNoZyfY8cWh7jLQ==}
+ '@rolldown/binding-linux-arm64-musl@1.0.0-rc.2':
+ resolution: {integrity: sha512-OwJCeMZlmjKsN9pfJfTmqYpe3JC+L6RO87+hu9ajRLr1Lh6cM2FRQ8e48DLRyRDww8Ti695XQvqEANEMmsuzLw==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [linux]
- '@rolldown/binding-linux-x64-gnu@1.0.0-beta.59':
- resolution: {integrity: sha512-VlfwJ/HCskPmQi8R0JuAFndySKVFX7yPhE658o27cjSDWWbXVtGkSbwaxstii7Q+3Rz87ZXN+HLnb1kd4R9Img==}
+ '@rolldown/binding-linux-x64-gnu@1.0.0-rc.2':
+ resolution: {integrity: sha512-uQqBmA8dTWbKvfqbeSsXNUssRGfdgQCc0hkGfhQN7Pf85wG2h0Fd/z2d+ykyT4YbcsjQdgEGxBNsg3v4ekOuEA==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [linux]
- '@rolldown/binding-linux-x64-musl@1.0.0-beta.59':
- resolution: {integrity: sha512-kuO92hTRyGy0Ts3Nsqll0rfO8eFsEJe9dGQGktkQnZ2hrJrDVN0y419dMgKy/gB2S2o7F2dpWhpfQOBehZPwVA==}
+ '@rolldown/binding-linux-x64-musl@1.0.0-rc.2':
+ resolution: {integrity: sha512-ItZabVsICCYWHbP+jcAgNzjPAYg5GIVQp/NpqT6iOgWctaMYtobClc5m0kNtxwqfNrLXoyt998xUey4AvcxnGQ==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [linux]
- '@rolldown/binding-openharmony-arm64@1.0.0-beta.59':
- resolution: {integrity: sha512-PXAebvNL4sYfCqi8LdY4qyFRacrRoiPZLo3NoUmiTxm7MPtYYR8CNtBGNokqDmMuZIQIecRaD/jbmFAIDz7DxQ==}
+ '@rolldown/binding-openharmony-arm64@1.0.0-rc.2':
+ resolution: {integrity: sha512-U4UYANwafcMXSUC0VqdrqTAgCo2v8T7SiuTYwVFXgia0KOl8jiv3okwCFqeZNuw/G6EWDiqhT8kK1DLgyLsxow==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [openharmony]
- '@rolldown/binding-wasm32-wasi@1.0.0-beta.59':
- resolution: {integrity: sha512-yJoklQg7XIZq8nAg0bbkEXcDK6sfpjxQGxpg2Nd6ERNtvg+eOaEBRgPww0BVTrYFQzje1pB5qPwC2VnJHT3koQ==}
+ '@rolldown/binding-wasm32-wasi@1.0.0-rc.2':
+ resolution: {integrity: sha512-ZIWCjQsMon4tqRoao0Vzowjwx0cmFT3kublh2nNlgeasIJMWlIGHtr0d4fPypm57Rqx4o1h4L8SweoK2q6sMGA==}
engines: {node: '>=14.0.0'}
cpu: [wasm32]
- '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.59':
- resolution: {integrity: sha512-ljZ4+McmCbIuZwEBaoGtiG8Rq2nJjaXEnLEIx+usWetXn1ECjXY0LAhkELxOV6ytv4ensEmoJJ8nXg47hRMjlw==}
+ '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.2':
+ resolution: {integrity: sha512-NIo7vwRUPEzZ4MuZGr5YbDdjJ84xdiG+YYf8ZBfTgvIsk9wM0sZamJPEXvaLkzVIHpOw5uqEHXS85Gqqb7aaqQ==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [win32]
- '@rolldown/binding-win32-x64-msvc@1.0.0-beta.59':
- resolution: {integrity: sha512-bMY4tTIwbdZljW+xe/ln1hvs0SRitahQSXfWtvgAtIzgSX9Ar7KqJzU7lRm33YTRFIHLULRi53yNjw9nJGd6uQ==}
+ '@rolldown/binding-win32-x64-msvc@1.0.0-rc.2':
+ resolution: {integrity: sha512-bLKzyLFbvngeNPZocuLo3LILrKwCrkyMxmRXs6fZYDrvh7cyZRw9v56maDL9ipPas0OOmQK1kAKYwvTs30G21Q==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [win32]
@@ -3395,8 +3383,8 @@ packages:
'@rolldown/pluginutils@1.0.0-beta.27':
resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==}
- '@rolldown/pluginutils@1.0.0-beta.59':
- resolution: {integrity: sha512-aoh6LAJRyhtazs98ydgpNOYstxUlsOV1KJXcpf/0c0vFcUA8uyd/hwKRhqE/AAPNqAho9RliGsvitCoOzREoVA==}
+ '@rolldown/pluginutils@1.0.0-rc.2':
+ resolution: {integrity: sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==}
'@rollup/plugin-replace@6.0.3':
resolution: {integrity: sha512-J4RZarRvQAm5IF0/LwUUg+obsm+xZhYnbMXmXROyoSE1ATJe3oXSb9L5MMppdxP2ylNSjv6zFBwKYjcKMucVfA==}
@@ -3811,9 +3799,6 @@ packages:
resolution: {integrity: sha512-werDRwJSqzY28fbOBQ+wP7pQ6jl6Y+EJ8mA/dABOJEq2iBbGLXAzGPywRji7x4zULhjBDS3chQrR3nE7NVcoDw==}
engines: {node: '>=18'}
- '@tokenizer/token@0.3.0':
- resolution: {integrity: sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==}
-
'@trpc/client@11.0.0-rc.441':
resolution: {integrity: sha512-O9zHP7JcK35jO5G8BoW304WdRcHW1TKZae2QDU65KvfMxosbmqY2ajwAgs6CxTS45c1PuF9vI0kXtP52e3FYgQ==}
peerDependencies:
@@ -5186,10 +5171,6 @@ packages:
resolution: {integrity: sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==}
engines: {node: '>=18'}
- file-type@19.6.0:
- resolution: {integrity: sha512-VZR5I7k5wkD0HgFnMsq5hOsSc710MJMu5Nc5QYsbe38NN5iPV/XTObYLc/cpttRTf6lX538+5uO1ZQRhYibiZQ==}
- engines: {node: '>=18'}
-
file-uri-to-path@1.0.0:
resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==}
@@ -5212,15 +5193,6 @@ packages:
flatted@3.3.3:
resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==}
- fm-odata-client@3.0.2:
- resolution: {integrity: sha512-iPEJFUWLTlBLFfL4UISsW7Xk4G5D2/8vPUoWxncWGuPScdENwpUaWczJzdi/rx8mIk6OHbVvbe3i9+pd6kPXJg==}
- engines: {node: '>=0.20'}
- peerDependencies:
- amazon-cognito-identity-js: ^4.5.12
- peerDependenciesMeta:
- amazon-cognito-identity-js:
- optional: true
-
follow-redirects@1.15.11:
resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==}
engines: {node: '>=4.0'}
@@ -6649,10 +6621,6 @@ packages:
pathe@2.0.3:
resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==}
- peek-readable@5.4.2:
- resolution: {integrity: sha512-peBp3qZyuS6cNIJ2akRNG1uo1WJ1d0wTxg/fxMdZ0BqCVhx242bSFHM9eNqflfJVS9SsgkzgT/1UgnsurBOTMg==}
- engines: {node: '>=14.16'}
-
perfect-debounce@2.0.0:
resolution: {integrity: sha512-fkEH/OBiKrqqI/yIgjR92lMfs2K8105zt/VT6+7eTjNwisrsh47CeIED9z58zI7DfKdH3uHAn25ziRZn3kgAow==}
@@ -7018,8 +6986,8 @@ packages:
vue-tsc:
optional: true
- rolldown@1.0.0-beta.59:
- resolution: {integrity: sha512-Slm000Gd8/AO9z4Kxl4r8mp/iakrbAuJ1L+7ddpkNxgQ+Vf37WPvY63l3oeyZcfuPD1DRrUYBsRPIXSOhvOsmw==}
+ rolldown@1.0.0-rc.2:
+ resolution: {integrity: sha512-1g/8Us9J8sgJGn3hZfBecX1z4U3y5KO7V/aV2U1M/9UUzLNqHA8RfFQ/NPT7HLxOIldyIgrcjaYTRvA81KhJIg==}
engines: {node: ^20.19.0 || >=22.12.0}
hasBin: true
@@ -7320,10 +7288,6 @@ packages:
strnum@2.1.2:
resolution: {integrity: sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ==}
- strtok3@9.1.1:
- resolution: {integrity: sha512-FhwotcEqjr241ZbjFzjlIYg6c5/L/s4yBGWSMvJ9UoExiSqL+FnFA/CaeZx17WGaZMS/4SOZp8wH18jSS4R4lw==}
- engines: {node: '>=16'}
-
style-to-js@1.1.21:
resolution: {integrity: sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==}
@@ -7457,10 +7421,6 @@ packages:
resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==}
engines: {node: '>=0.6'}
- token-types@6.1.2:
- resolution: {integrity: sha512-dRXchy+C0IgK8WPC6xvCHFRIWYUbqqdEIKPaKo/AcTUNzwLTK6AH7RjdLWsEZcAN/TBdtfUw3PYEgPr5VPr6ww==}
- engines: {node: '>=14.16'}
-
totalist@3.0.1:
resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==}
engines: {node: '>=6'}
@@ -7655,10 +7615,6 @@ packages:
engines: {node: '>=0.8.0'}
hasBin: true
- uint8array-extras@1.5.0:
- resolution: {integrity: sha512-rvKSBiC5zqCCiDZ9kAOszZcDvdAHwwIKJG33Ykj43OKcWsnmcBRL09YTU4nOeHZ8Y2a7l1MgTd08SBe9A8Qj6A==}
- engines: {node: '>=18'}
-
ultracite@7.0.8:
resolution: {integrity: sha512-b98lKaVl3UtH1TF6gZjhPQgtx063i0XpdV1nHEfexHsLyLaaosqU9FT8Tw/HwQkb/UmJ8WihKndur0bSUT0BYw==}
hasBin: true
@@ -8421,8 +8377,6 @@ snapshots:
'@biomejs/cli-win32-x64@2.3.11':
optional: true
- '@borewit/text-codec@0.2.1': {}
-
'@braidai/lang@1.1.2': {}
'@changesets/apply-release-plan@7.0.14':
@@ -9522,7 +9476,7 @@ snapshots:
'@orama/orama@3.1.18': {}
- '@oxc-project/types@0.107.0': {}
+ '@oxc-project/types@0.111.0': {}
'@oxc-resolver/binding-android-arm-eabi@11.16.2':
optional: true
@@ -10432,50 +10386,50 @@ snapshots:
dependencies:
react: 19.2.3
- '@rolldown/binding-android-arm64@1.0.0-beta.59':
+ '@rolldown/binding-android-arm64@1.0.0-rc.2':
optional: true
- '@rolldown/binding-darwin-arm64@1.0.0-beta.59':
+ '@rolldown/binding-darwin-arm64@1.0.0-rc.2':
optional: true
- '@rolldown/binding-darwin-x64@1.0.0-beta.59':
+ '@rolldown/binding-darwin-x64@1.0.0-rc.2':
optional: true
- '@rolldown/binding-freebsd-x64@1.0.0-beta.59':
+ '@rolldown/binding-freebsd-x64@1.0.0-rc.2':
optional: true
- '@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.59':
+ '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.2':
optional: true
- '@rolldown/binding-linux-arm64-gnu@1.0.0-beta.59':
+ '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.2':
optional: true
- '@rolldown/binding-linux-arm64-musl@1.0.0-beta.59':
+ '@rolldown/binding-linux-arm64-musl@1.0.0-rc.2':
optional: true
- '@rolldown/binding-linux-x64-gnu@1.0.0-beta.59':
+ '@rolldown/binding-linux-x64-gnu@1.0.0-rc.2':
optional: true
- '@rolldown/binding-linux-x64-musl@1.0.0-beta.59':
+ '@rolldown/binding-linux-x64-musl@1.0.0-rc.2':
optional: true
- '@rolldown/binding-openharmony-arm64@1.0.0-beta.59':
+ '@rolldown/binding-openharmony-arm64@1.0.0-rc.2':
optional: true
- '@rolldown/binding-wasm32-wasi@1.0.0-beta.59':
+ '@rolldown/binding-wasm32-wasi@1.0.0-rc.2':
dependencies:
'@napi-rs/wasm-runtime': 1.1.1
optional: true
- '@rolldown/binding-win32-arm64-msvc@1.0.0-beta.59':
+ '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.2':
optional: true
- '@rolldown/binding-win32-x64-msvc@1.0.0-beta.59':
+ '@rolldown/binding-win32-x64-msvc@1.0.0-rc.2':
optional: true
'@rolldown/pluginutils@1.0.0-beta.27': {}
- '@rolldown/pluginutils@1.0.0-beta.59': {}
+ '@rolldown/pluginutils@1.0.0-rc.2': {}
'@rollup/plugin-replace@6.0.3(rollup@4.55.1)':
dependencies:
@@ -10901,13 +10855,11 @@ snapshots:
- typescript
- vite
- '@tokenizer/token@0.3.0': {}
-
'@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441)':
dependencies:
'@trpc/server': 11.0.0-rc.441
- '@trpc/next@11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/react-query@11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/server@11.0.0-rc.441)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(@trpc/server@11.0.0-rc.441)(next@16.1.1(@babel/core@7.28.5)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)':
+ '@trpc/next@11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/react-query@11.0.0-rc.441(@tanstack/react-query@5.90.16(react@19.2.3))(@trpc/client@11.0.0-rc.441(@trpc/server@11.0.0-rc.441))(@trpc/server@11.0.0-rc.441)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(@trpc/server@11.0.0-rc.441)(next@16.1.1(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)':
dependencies:
'@trpc/client': 11.0.0-rc.441(@trpc/server@11.0.0-rc.441)
'@trpc/server': 11.0.0-rc.441
@@ -12397,13 +12349,6 @@ snapshots:
dependencies:
is-unicode-supported: 2.1.0
- file-type@19.6.0:
- dependencies:
- get-stream: 9.0.1
- strtok3: 9.1.1
- token-types: 6.1.2
- uint8array-extras: 1.5.0
-
file-uri-to-path@1.0.0:
optional: true
@@ -12433,10 +12378,6 @@ snapshots:
flatted@3.3.3: {}
- fm-odata-client@3.0.2:
- dependencies:
- file-type: 19.6.0
-
follow-redirects@1.15.11: {}
foreground-child@3.3.1:
@@ -13943,7 +13884,7 @@ snapshots:
optionalDependencies:
'@rollup/rollup-linux-x64-gnu': 4.55.1
- next-auth@4.24.13(next@16.1.1(@babel/core@7.28.5)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3):
+ next-auth@4.24.13(next@16.1.1(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3):
dependencies:
'@babel/runtime': 7.28.4
'@panva/hkdf': 1.2.1
@@ -14247,8 +14188,6 @@ snapshots:
pathe@2.0.3: {}
- peek-readable@5.4.2: {}
-
perfect-debounce@2.0.0: {}
picocolors@1.1.1: {}
@@ -14691,7 +14630,7 @@ snapshots:
rfdc@1.4.1: {}
- rolldown-plugin-dts@0.15.10(oxc-resolver@11.16.2)(rolldown@1.0.0-beta.59)(typescript@5.9.3):
+ rolldown-plugin-dts@0.15.10(oxc-resolver@11.16.2)(rolldown@1.0.0-rc.2)(typescript@5.9.3):
dependencies:
'@babel/generator': 7.28.5
'@babel/parser': 7.28.5
@@ -14701,31 +14640,31 @@ snapshots:
debug: 4.4.3(supports-color@5.5.0)
dts-resolver: 2.1.3(oxc-resolver@11.16.2)
get-tsconfig: 4.13.0
- rolldown: 1.0.0-beta.59
+ rolldown: 1.0.0-rc.2
optionalDependencies:
typescript: 5.9.3
transitivePeerDependencies:
- oxc-resolver
- supports-color
- rolldown@1.0.0-beta.59:
- dependencies:
- '@oxc-project/types': 0.107.0
- '@rolldown/pluginutils': 1.0.0-beta.59
- optionalDependencies:
- '@rolldown/binding-android-arm64': 1.0.0-beta.59
- '@rolldown/binding-darwin-arm64': 1.0.0-beta.59
- '@rolldown/binding-darwin-x64': 1.0.0-beta.59
- '@rolldown/binding-freebsd-x64': 1.0.0-beta.59
- '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-beta.59
- '@rolldown/binding-linux-arm64-gnu': 1.0.0-beta.59
- '@rolldown/binding-linux-arm64-musl': 1.0.0-beta.59
- '@rolldown/binding-linux-x64-gnu': 1.0.0-beta.59
- '@rolldown/binding-linux-x64-musl': 1.0.0-beta.59
- '@rolldown/binding-openharmony-arm64': 1.0.0-beta.59
- '@rolldown/binding-wasm32-wasi': 1.0.0-beta.59
- '@rolldown/binding-win32-arm64-msvc': 1.0.0-beta.59
- '@rolldown/binding-win32-x64-msvc': 1.0.0-beta.59
+ rolldown@1.0.0-rc.2:
+ dependencies:
+ '@oxc-project/types': 0.111.0
+ '@rolldown/pluginutils': 1.0.0-rc.2
+ optionalDependencies:
+ '@rolldown/binding-android-arm64': 1.0.0-rc.2
+ '@rolldown/binding-darwin-arm64': 1.0.0-rc.2
+ '@rolldown/binding-darwin-x64': 1.0.0-rc.2
+ '@rolldown/binding-freebsd-x64': 1.0.0-rc.2
+ '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.2
+ '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.2
+ '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.2
+ '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.2
+ '@rolldown/binding-linux-x64-musl': 1.0.0-rc.2
+ '@rolldown/binding-openharmony-arm64': 1.0.0-rc.2
+ '@rolldown/binding-wasm32-wasi': 1.0.0-rc.2
+ '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.2
+ '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.2
rollup-plugin-preserve-directives@0.4.0(rollup@4.55.1):
dependencies:
@@ -15126,11 +15065,6 @@ snapshots:
strnum@2.1.2: {}
- strtok3@9.1.1:
- dependencies:
- '@tokenizer/token': 0.3.0
- peek-readable: 5.4.2
-
style-to-js@1.1.21:
dependencies:
style-to-object: 1.0.14
@@ -15253,12 +15187,6 @@ snapshots:
toidentifier@1.0.1: {}
- token-types@6.1.2:
- dependencies:
- '@borewit/text-codec': 0.2.1
- '@tokenizer/token': 0.3.0
- ieee754: 1.2.1
-
totalist@3.0.1: {}
touch@3.1.1: {}
@@ -15316,8 +15244,8 @@ snapshots:
diff: 8.0.2
empathic: 2.0.0
hookable: 5.5.3
- rolldown: 1.0.0-beta.59
- rolldown-plugin-dts: 0.15.10(oxc-resolver@11.16.2)(rolldown@1.0.0-beta.59)(typescript@5.9.3)
+ rolldown: 1.0.0-rc.2
+ rolldown-plugin-dts: 0.15.10(oxc-resolver@11.16.2)(rolldown@1.0.0-rc.2)(typescript@5.9.3)
semver: 7.7.3
tinyexec: 1.0.2
tinyglobby: 0.2.15
@@ -15412,8 +15340,6 @@ snapshots:
uglify-js@3.19.3:
optional: true
- uint8array-extras@1.5.0: {}
-
ultracite@7.0.8(typescript@5.9.3):
dependencies:
'@clack/prompts': 0.11.0