diff --git a/src/v2/index.ts b/src/v2/index.ts
new file mode 100644
index 00000000..a7bd8244
--- /dev/null
+++ b/src/v2/index.ts
@@ -0,0 +1,2 @@
+export { RecordsV2, FlatfileRecord } from "./records";
+export * from "./records/types";
diff --git a/src/v2/records/FlatfileRecord.ts b/src/v2/records/FlatfileRecord.ts
new file mode 100644
index 00000000..df47562b
--- /dev/null
+++ b/src/v2/records/FlatfileRecord.ts
@@ -0,0 +1,569 @@
+import {
+  HASH_PROP_DELIM,
+  HASH_VALUE_DELIM,
+  asBool,
+  asDate,
+  asNullableString,
+  asNumber,
+  asString,
+  isPresent,
+} from "./utils";
+import { JsonlRecord } from "./types";
+
+// Browser-compatible UUID generation
+function generateUUID(): string {
+  if (typeof crypto !== "undefined" && crypto.randomUUID) {
+    return crypto.randomUUID();
+  }
+  // Fallback for older browsers
+  return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, function (c) {
+    const r = (Math.random() * 16) | 0;
+    const v = c === "x" ? r : (r & 0x3) | 0x8;
+    return v.toString(16);
+  });
+}
+
+type CastingMethod = "str" | "defStr" | "bool" | "num" | "date";
+
+export class FlatfileRecord {
+  private _changes: Map<string, any> = new Map();
+  private _errs: Map<string, Set<string>> = new Map();
+  private _metadata: Record<string, any>;
+  private _deleted = false;
+  private _tempId?: string;
+  private _info: Map<string, Set<string>> = new Map();
+  private _warns: Map<string, Set<string>> = new Map();
+  private _committed = false;
+
+  constructor(public data: Readonly<JsonlRecord>) {
+    this._metadata = data.__m || {};
+    Object.freeze(this.data);
+
+    if (data.__i) {
+      data.__i.forEach((message: { x: string; m: string; t?: string }) => {
+        if (!message.t || message.t === "error") {
+          if (!this._errs.has(message.x)) {
+            this._errs.set(message.x, new Set());
+          }
+          this._errs.get(message.x)?.add(message.m);
+        } else if (message.t === "info") {
+          if (!this._info.has(message.x)) {
+            this._info.set(message.x, new Set());
+          }
+          this._info.get(message.x)?.add(message.m);
+        } else if (message.t === "warn") {
+          if (!this._warns.has(message.x)) {
+            this._warns.set(message.x, new Set());
+          }
+          this._warns.get(message.x)?.add(message.m);
+        }
+      });
+    }
+  }
+
+  get id() {
+    return this.data.__k || this._tempId;
+  }
+
+  get meta(): Record<string, any> {
+    return this._metadata;
+  }
+
+  get slug() {
+    return this.data.__n;
+  }
+
+  get sheetId() {
+    return this.data.__s;
+  }
+
+  getLinks(key?: string) {
+    if (key) {
+      return this.data.__l?.filter((link) => link.__x === key) || [];
+    }
+    return this.data.__l || [];
+  }
+
+  set(key: string, value: any) {
+    if (this.data[key] === value) {
+      this._changes.delete(key);
+      return this;
+    }
+    this._changes.set(key, value);
+    // Mark as uncommitted when changes are made
+    this._committed = false;
+    return this;
+  }
+
+  flag(key: string) {
+    this.set(key, true);
+  }
+
+  unflag(key: string) {
+    this.set(key, false);
+  }
+
+  get(key: string) {
+    if (this._changes.has(key)) {
+      return this._changes.get(key);
+    }
+    return this.data[key];
+  }
+
+  has(key: string) {
+    return isPresent(this.get(key));
+  }
+
+  hasAny(...keys: string[]) {
+    return keys.some((k) => this.has(k));
+  }
+
+  hasAll(...keys: string[]) {
+    return keys.every((k) => this.has(k));
+  }
+
+  isEmpty(key: string) {
+    return !this.has(key);
+  }
+
+  keys(options?: { omit?: string[]; pick?: string[] }): string[] {
+    const set = new Set(Object.keys(this.data).filter((key) => !key.startsWith("__")));
+
+    for (const key of this._changes.keys()) {
+      if (!key.startsWith("__")) {
+        set.add(key);
+      }
+    }
+    const res = Array.from(set);
+
+    const omitKeys = options?.omit;
+    if (omitKeys?.length) {
+      return res.filter((key) =>
!omitKeys.includes(key)); + } + + const pickKeys = options?.pick; + if (pickKeys?.length) { + return res.filter((key) => pickKeys.includes(key)); + } + + return res; + } + + keysWithData(props?: { exclude?: Array }): string[] { + const keys = this.keys().filter((k) => this.has(k)); + if (props?.exclude) { + const f = props.exclude.flat(); + return keys.filter((k) => !f.includes(k)); + } + return keys; + } + + intersects(item: FlatfileRecord, keys: string[]) { + return keys.every((key) => { + const value1 = this.str(key); + const value2 = item.str(key); + return value1 === value2; + }); + } + + hash(...keys: string[]) { + return keys + .map((k) => [k, this.get(k)]) + .map(([k, v]) => `${k}${HASH_VALUE_DELIM}${asString(v)}`) + .join(HASH_PROP_DELIM); + } + + isDirty(key?: string): boolean { + if (key) { + return ( + this._changes.has(key) || + (this._errs.get(key)?.size ?? 0) > 0 || + (this._info.get(key)?.size ?? 0) > 0 || + (this._warns.get(key)?.size ?? 0) > 0 + ); + } + return ( + this._changes.size > 0 || + this._errs.size > 0 || + this._info.size > 0 || + this._warns.size > 0 || + this._deleted || + // New records (no __k) are dirty until they've been committed + (!this.get("__k") && !this._committed) + ); + } + + /** + * Manually mark the record as dirty, forcing it to be written on the next save operation. + * Useful when you suspect the record may have been changed by another process. + * This adds all current field values to the changes, ensuring the full record will be written. + * + * @example + * ```typescript + * const record = new FlatfileRecord({ id: '123', name: 'John' }); + * record.commit(); // Record is now clean + * + * // Later, you suspect the record may have been modified externally + * record.setDirty(); // Mark as dirty + * await recordsV2.write([record]); // Will write the full record + * ``` + */ + setDirty(): this { + // Add all non-system fields to changes to mark entire record as dirty + for (const key in this.data) { + if (!key.startsWith("__")) { + this._changes.set(key, this.data[key]); + } + } + this._committed = false; + + // Note: We don't need to explicitly preserve _errs, _warns, _info, or _deleted + // because those are already separate state that isDirty() checks independently + // They remain intact and will make the record dirty regardless of _changes + + return this; + } + + eachOfKeysPresent(keys: string[], callback: (key: string, value: any) => void) { + for (const key of keys) { + if (this.has(key)) { + callback(key, this.get(key)); + } + } + } + + isDeleted(): boolean { + return this._deleted; + } + + delete() { + this._deleted = true; + this._committed = false; + } + + str(key: string) { + return asNullableString(this.get(key)); + } + + defStr(key: string): string { + return asString(this.get(key)); + } + + bool(key: string) { + return asBool(this.get(key)); + } + + num(key: string) { + return asNumber(this.get(key)); + } + + date(key: string) { + return asDate(this.get(key)); + } + + pick(...keys: string[]) { + const obj: Record = {}; + for (const key of keys) { + obj[key] = this.get(key); + } + return obj; + } + + err(key: string, msg: string) { + if (!this._errs.has(key)) { + this._errs.set(key, new Set([msg])); + } + const errors = this._errs.get(key); + if (errors) { + errors.add(msg); + } + this._committed = false; + return this; + } + + values(castAs?: CastingMethod) { + if (!castAs) { + return Object.fromEntries(this.entries()); + } + + return Object.fromEntries(this.keys().map((key) => [key, this[castAs](key)])); + } + + entries() { + 
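+    // Values are read via get(), so newly set keys and uncommitted changes are included.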
return this.keys().map((key) => [key, this.get(key)]); + } + + merge(item: FlatfileRecord, props: { overwrite?: boolean } = {}) { + for (const key of item.keys()) { + if (props.overwrite) { + this.set(key, item.get(key)); + } else if (!this.has(key)) { + this.set(key, item.get(key)); + } + } + return this; + } + + hasConflict(b: FlatfileRecord, keys?: string[]) { + if (keys) { + return keys.some((key) => { + const aValue = this.get(key); + const bValue = b.get(key); + return aValue && bValue && aValue !== bValue; + }); + } + return this.entries().some(([key, aValue]) => { + const bValue = b.get(key); + return aValue && bValue && aValue !== bValue; + }); + } + + toJSON(): JsonlRecord { + return { ...this.data, ...this.changeset() } as JsonlRecord; + } + + toSimpleRecord() { + return { + _id: this.id, + ...this.values(), + }; + } + + copy(props: { mixin?: FlatfileRecord; select?: string[]; slug?: string; sheetId?: string } = {}) { + const newObj = new FlatfileRecord({} as JsonlRecord); + newObj._tempId = `TEMP_${generateUUID()}`; + if (props.slug) { + newObj.set("__n", props.slug); + } + if (props.sheetId) { + newObj.set("__s", props.sheetId); + } + if (props.select) { + for (const key of props.select) { + newObj.set(key, props.mixin?.get(key) ?? this.get(key)); + } + } else { + for (const key in this.data) { + if (!key.startsWith("__")) { + newObj.set(key, this.get(key)); + } + } + if (props.mixin) { + for (const key in props.mixin.data) { + if (!key.startsWith("__")) { + newObj.set(key, props.mixin.get(key)); + } + } + } + } + return newObj; + } + + commit() { + const newObj: Record = Object.assign({}, this.data); + for (const [key, value] of this._changes) { + newObj[key] = value; + } + this._changes.clear(); + this._committed = true; + + if (this._errs.size) { + newObj.__i = []; + for (const [key, errs] of this._errs) { + for (const err of errs) { + newObj.__i.push({ x: key, m: err }); + } + } + } + this._errs.clear(); + + if (this._info.size) { + newObj.__i = newObj.__i || []; + for (const [key, errs] of this._info) { + for (const err of errs) { + newObj.__i.push({ x: key, m: err, t: "info" }); + } + } + } + this._info.clear(); + + if (this._warns.size) { + newObj.__i = newObj.__i || []; + for (const [key, errs] of this._warns) { + for (const err of errs) { + newObj.__i.push({ x: key, m: err, t: "warn" }); + } + } + } + this._warns.clear(); + + this.data = Object.freeze(newObj) as any; + } + + changeset() { + // For new records (no __k), include all data + // For existing records, include only changes + if (!this.get("__k")) { + // New record - return all data (like toJSON but avoid circular call) + const val: Record = { ...this.data, ...Object.fromEntries(this._changes) }; + + if (this._deleted) { + val.__d = true; + } + if (this._errs.size || this._info.size || this._warns.size) { + if (!val.__i) { + val.__i = []; + } + for (const [key, errs] of this._errs) { + for (const err of errs) { + val[key] = this.get(key); + val.__i.push({ x: key, m: err }); + } + } + for (const [key, infos] of this._info) { + for (const info of infos) { + val[key] = this.get(key); + + val.__i.push({ x: key, m: info, t: "info" }); + } + } + for (const [key, warns] of this._warns) { + for (const warn of warns) { + val[key] = this.get(key); + + val.__i.push({ x: key, m: warn, t: "warn" }); + } + } + } + return val; + } + + // Existing record - return only changes + const val = Object.fromEntries(this._changes); + val.__k = this.get("__k"); + val.__s = this.get("__s"); + val.__n = this.get("__n"); + val.__m = 
this.get("__m"); + val.__c = this.get("__c"); + + if (this._deleted) { + val.__d = true; + } + if (this._errs.size || this._info.size || this._warns.size) { + if (!val.__i) { + val.__i = []; + } + for (const [key, errs] of this._errs) { + for (const err of errs) { + val[key] = this.get(key); + val.__i.push({ x: key, m: err }); + } + } + for (const [key, infos] of this._info) { + for (const info of infos) { + val[key] = this.get(key); + + val.__i.push({ x: key, m: info, t: "info" }); + } + } + for (const [key, warns] of this._warns) { + for (const warn of warns) { + val[key] = this.get(key); + + val.__i.push({ x: key, m: warn, t: "warn" }); + } + } + } + return val; + } + + /** + * @deprecated use .err() instead + */ + addError(key: string, msg: string) { + return this.err(key, msg); + } + + hasError(...keys: string[]) { + if (keys.length > 0) { + return keys.some((key) => this._errs.has(key) && this._errs.get(key)!.size > 0); + } + return this._errs.size > 0; + } + + errorFields(...keys: string[]) { + if (keys.length > 0) { + return keys.filter((key) => this._errs.has(key) && this._errs.get(key)!.size > 0); + } + return Array.from(this._errs.keys()); + } + + errorIf(key: string, cb: (val: any) => any, err: string) { + if (cb(this.get(key))) { + this.err(key, err); + } + } + + info(key: string, msg: string) { + if (!this._info.has(key)) { + this._info.set(key, new Set([msg])); + } + const infos = this._info.get(key); + if (infos) { + infos.add(msg); + } + this._committed = false; + return this; + } + + /** + * @deprecated use .info() instead + */ + addComment(key: string, msg: string) { + return this.info(key, msg); + } + + /** + * @deprecated use .info() instead + */ + addInfo(key: string, msg: string) { + return this.info(key, msg); + } + + warn(key: string, msg: string) { + if (!this._warns.has(key)) { + this._warns.set(key, new Set([msg])); + } + const warnings = this._warns.get(key); + if (warnings) { + warnings.add(msg); + } + this._committed = false; + return this; + } + + /** + * @deprecated use .warn() instead + */ + addWarning(key: string, msg: string) { + return this.warn(key, msg); + } + + setReadOnly(key: string) { + this.setFieldConfig(key, { readonly: true }); + } + + setConfig(setter: (config: any) => any) { + const baseConfig = this.data.__c || {}; + this._changes.set("__c", setter(baseConfig)); + } + + setFieldConfig(key: string, newConfig: any) { + this.setConfig((config: any) => { + config.fields = config.fields || {}; + const baseConfig = config.fields[key as string] || {}; + config.fields[key as string] = { ...baseConfig, ...newConfig }; + return config; + }); + } +} diff --git a/src/v2/records/index.ts b/src/v2/records/index.ts index bfcb1769..5ab9c456 100644 --- a/src/v2/records/index.ts +++ b/src/v2/records/index.ts @@ -9,6 +9,7 @@ import { WriteRecordsResponse, WriteStreamingOptions, } from "./types"; +import { FlatfileRecord } from "./FlatfileRecord"; import * as environments from "../../environments"; import * as errors from "../../errors"; import * as serializers from "../../serialization"; @@ -124,6 +125,83 @@ export class RecordsV2 { } } + /** + * Retrieve records from a sheet as FlatfileRecord objects. + * + * This method fetches all records at once and returns them as an array of + * FlatfileRecord objects, which provide a rich API for manipulating record + * data including typed getters, validation, error handling, and change tracking. 
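+     *
+     * Records fetched this way can be validated and written back with write(); a minimal sketch
+     * (the sheet ID below is a placeholder) looks like:
+     *
+     * ```typescript
+     * const records = await recordsV2.get('us_sh_123');
+     * for (const record of records) {
+     *   if (record.has('email') && !record.str('email')!.includes('@')) {
+     *     record.err('email', 'Invalid email format');
+     *   }
+     * }
+     * // write() serializes only dirty records (changed values or added messages)
+     * await recordsV2.write(records.filter(r => r.isDirty()), { sheetId: 'us_sh_123' });
+     * ```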
+     *
+     * @param sheetId - The ID of the sheet to retrieve records from
+     * @param options - Optional request parameters for filtering, pagination, etc.
+     * @param requestOptions - Optional request configuration (headers, timeout, etc.)
+     * @returns Promise that resolves to an array of FlatfileRecord objects
+     *
+     * @example
+     * ```typescript
+     * const records = await recordsV2.get('us_sh_123', {
+     *   fields: ['firstName', 'lastName'],
+     *   pageSize: 1000
+     * });
+     * records.forEach(record => {
+     *   console.log(`Record ID: ${record.id}`);
+     *   console.log(`First Name: ${record.str('firstName')}`);
+     *   console.log(`Last Name: ${record.str('lastName')}`);
+     * });
+     * ```
+     */
+    public async get(
+        sheetId: Flatfile.SheetId,
+        options: GetRecordsRequestOptions = {},
+        requestOptions: FernRecords.RequestOptions = {},
+    ): Promise<FlatfileRecord[]> {
+        const rawRecords = await this.getRaw(sheetId, options, requestOptions);
+        return rawRecords.map(rawRecord => new FlatfileRecord(rawRecord));
+    }
+
+    /**
+     * Stream records from a sheet as FlatfileRecord objects.
+     *
+     * This method provides an async generator that yields FlatfileRecord objects
+     * as they are received from the server. FlatfileRecord objects provide a rich
+     * API for manipulating record data including typed getters, validation, error
+     * handling, and change tracking. This is the most memory-efficient way to
+     * process large datasets while maintaining the full functionality of FlatfileRecord.
+     *
+     * @param sheetId - The ID of the sheet to retrieve records from
+     * @param options - Optional request parameters for filtering, pagination, etc.
+     * @param requestOptions - Optional request configuration (headers, timeout, etc.)
+     * @returns AsyncGenerator that yields FlatfileRecord objects
+     *
+     * @example
+     * ```typescript
+     * for await (const record of recordsV2.getStreaming('us_sh_123', {
+     *   includeTimestamps: true
+     * })) {
+     *   console.log(`Record ID: ${record.id}`);
+     *   console.log(`Updated at: ${record.get('__u')}`);
+     *
+     *   // Use rich FlatfileRecord API
+     *   if (record.has('email')) {
+     *     console.log(`Email: ${record.str('email')}`);
+     *   }
+     *
+     *   if (record.hasError()) {
+     *     console.log('Record has validation errors');
+     *   }
+     * }
+     * ```
+     */
+    public async *getStreaming(
+        sheetId: Flatfile.SheetId,
+        options: GetRecordsRequestOptions = {},
+        requestOptions: FernRecords.RequestOptions = {},
+    ): AsyncGenerator<FlatfileRecord> {
+        for await (const rawRecord of this.getRawStreaming(sheetId, options, requestOptions)) {
+            yield new FlatfileRecord(rawRecord);
+        }
+    }
+
     /**
      * Stream JSONL response using ReadableStream (modern browsers)
      */
@@ -288,6 +366,255 @@ export class RecordsV2 {
         }
     }

+    /**
+     * Write FlatfileRecord objects to a sheet.
+     *
+     * This method takes an array of FlatfileRecord objects and writes them to the specified sheet.
+     * If truncate is true, it writes full record data using toJSON(). If truncate is false (default),
+     * it only writes changesets for dirty records. After a successful write, all records are committed
+     * to clear their dirty state.
+     *
+     * @param records - Array of FlatfileRecord objects to write
+     * @param options - Write configuration options
+     * @param requestOptions - Optional request configuration (headers, timeout, etc.)
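+     * @throws Error if no records are provided, or if truncate is false and none of the records have pending changes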
+ * @returns Promise that resolves to WriteRecordsResponse with operation results + * + * @example + * ```typescript + * const records = [ + * new FlatfileRecord({ firstName: 'John', lastName: 'Doe' }), + * new FlatfileRecord({ __k: 'us_rc_456', firstName: 'Jane' }) // Update existing + * ]; + * records[0].set('email', 'john@example.com'); // Make changes + * + * const result = await recordsV2.write(records, { + * sheetId: 'us_sh_123', + * truncate: false + * }); + * console.log(`Created: ${result.created}, Updated: ${result.updated}`); + * ``` + */ + public async write( + records: FlatfileRecord[], + options: WriteRecordsRequestOptions = {}, + requestOptions: FernRecords.RequestOptions = {}, + ): Promise { + if (records.length === 0) { + throw new Error("No records provided to write."); + } + + const url = await this._buildUrl(`/v2-alpha/records.jsonl`); + + // Serialize records based on truncate option + let jsonlBody: string; + if (options.truncate) { + // For truncate, write all records as full JSON + jsonlBody = records + .filter(r => r.id && !r.isDeleted()) // Only include non-deleted records + .map(r => { + const json = r.toJSON(); + // Ensure sheet ID is set if provided in options + if (options.sheetId && !json.__s) { + json.__s = options.sheetId; + } + return JSON.stringify(json); + }) + .join('\n'); + } else { + // For non-truncate, only write changesets of dirty records + const dirtyRecords = records + .filter(r => { + // Filter out temporary records that have been deleted + if (r.id?.startsWith('TEMP_') && r.isDeleted()) { + return false; + } + // Only include dirty records + return r.isDirty(); + }); + + if (dirtyRecords.length === 0) { + throw new Error("No changes made to the records that would need to be written."); + } + + jsonlBody = dirtyRecords + .map(r => { + const changeset = r.changeset(); + // Ensure sheet ID is set if provided in options + if (options.sheetId && !changeset.__s) { + changeset.__s = options.sheetId; + } + return JSON.stringify(changeset); + }) + .join('\n'); + } + + // Add options as query parameters, excluding sheetId since it's in the record body + const { sheetId: _, ...queryOptions } = options; + const queryParams = this._buildQueryParams(queryOptions, false); // Write operation + if (queryParams.length > 0) { + url.search = queryParams; + } + + const headers = await this._prepareHeaders(requestOptions, "application/jsonl"); + + // Add sheet ID header if provided in options + if (options.sheetId) { + headers["X-Sheet-Id"] = options.sheetId; + } + + const response = await this._executeRequest(url.toString(), "POST", { + body: jsonlBody, + contentType: "application/jsonl", + headers, + requestOptions, + }); + + // Parse the response + const responseBody = await response.text(); + let result: WriteRecordsResponse; + try { + result = JSON.parse(responseBody) as WriteRecordsResponse; + } catch (error) { + // If response isn't JSON, return a basic success response + result = { success: true }; + } + + // Commit all records after successful write to clear dirty state + records.forEach(r => r.commit()); + + return result; + } + + /** + * Stream FlatfileRecord objects to a sheet using HTTP body streaming. + * + * This method accepts an async generator/iterator of FlatfileRecord objects and streams them + * directly to the server. Like the write method, it handles truncate vs changeset logic, + * but processes records as they stream without loading all data into memory at once. 
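+     *
+     * Note that every record yielded by the generator is retained in memory until the request
+     * completes so it can be committed after a successful write; it is the JSONL serialization,
+     * not the record objects themselves, that is streamed.
+     *
+     * A common pattern (a sketch; the sheet ID is a placeholder) is to pipe getStreaming into
+     * writeStreaming:
+     *
+     * ```typescript
+     * async function* normalize() {
+     *   for await (const record of recordsV2.getStreaming('us_sh_123')) {
+     *     if (record.has('email')) {
+     *       record.set('email', record.defStr('email').toLowerCase());
+     *     }
+     *     yield record;
+     *   }
+     * }
+     * await recordsV2.writeStreaming(normalize(), { sheetId: 'us_sh_123' });
+     * ```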
+ * + * @param recordsStream - Async generator/iterator that yields FlatfileRecord objects + * @param options - Write configuration options (sheetId, truncate, etc.) + * @param requestOptions - Optional request configuration (headers, timeout, etc.) + * @returns Promise that resolves to WriteRecordsResponse with operation results + * + * @example + * ```typescript + * async function* generateRecords() { + * for (let i = 0; i < 100000; i++) { + * const record = new FlatfileRecord({ + * firstName: `User${i}`, + * email: `user${i}@example.com` + * }); + * record.set('processed', true); + * yield record; + * } + * } + * + * const result = await recordsV2.writeStreaming(generateRecords(), { + * sheetId: 'us_sh_123', + * truncate: false + * }); + * ``` + */ + public async writeStreaming( + recordsStream: AsyncIterable, + options: WriteStreamingOptions = {}, + requestOptions: FernRecords.RequestOptions = {}, + ): Promise { + const url = await this._buildUrl(`/v2-alpha/records.jsonl`); + + // Add options as query parameters, excluding sheetId since it's in the record body + const { sheetId, ...queryOptions } = options; + const queryParams = this._buildQueryParams(queryOptions, false); // Write operation + if (queryParams.length > 0) { + url.search = queryParams; + } + + // Track records for committing after successful write + const processedRecords: FlatfileRecord[] = []; + let hasChanges = false; + + // Create ReadableStream that processes FlatfileRecord objects + const readableStream = new ReadableStream({ + async start(controller) { + const encoder = new TextEncoder(); + try { + for await (const record of recordsStream) { + processedRecords.push(record); + + // Apply the same logic as write() method + let shouldInclude = false; + let jsonlLine: string; + + if (options.truncate) { + // For truncate, include all non-deleted records + if (record.id && !record.isDeleted()) { + shouldInclude = true; + const json = record.toJSON(); + // Ensure sheet ID is set if provided in options + if (sheetId && !json.__s) { + json.__s = sheetId; + } + jsonlLine = JSON.stringify(json); + } + } else { + // For non-truncate, only include dirty records (exclude temp deleted records) + if (!(record.id?.startsWith('TEMP_') && record.isDeleted()) && record.isDirty()) { + shouldInclude = true; + const changeset = record.changeset(); + // Ensure sheet ID is set if provided in options + if (sheetId && !changeset.__s) { + changeset.__s = sheetId; + } + jsonlLine = JSON.stringify(changeset); + } + } + + if (shouldInclude) { + hasChanges = true; + controller.enqueue(encoder.encode(jsonlLine! 
+ '\n')); + } + } + + // Check if we have any changes to write (only for non-truncate mode) + if (!options.truncate && !hasChanges) { + controller.error(new Error("No changes made to the records that would need to be written.")); + return; + } + + controller.close(); + } catch (error) { + controller.error(error); + } + }, + }); + + const headers = await this._prepareHeaders(requestOptions, "application/jsonl"); + + // Execute the streaming request + const response = await this._executeRequest(url.toString(), "POST", { + body: readableStream as any, // TypeScript might complain about ReadableStream + contentType: "application/jsonl", + headers, + requestOptions, + }); + + // Parse the response + const responseBody = await response.text(); + let result: WriteRecordsResponse; + try { + result = JSON.parse(responseBody) as WriteRecordsResponse; + } catch (error) { + // If response isn't JSON, return a basic success response + result = { success: true }; + } + + // Commit all processed records after successful write to clear dirty state + processedRecords.forEach(r => r.commit()); + + return result; + } + /** * Stream records to a sheet in raw JSONL format using HTTP body streaming. * @@ -610,3 +937,5 @@ export class RecordsV2 { } } } + +export { FlatfileRecord }; diff --git a/src/v2/records/types.ts b/src/v2/records/types.ts index e52a02a7..3f20bd58 100644 --- a/src/v2/records/types.ts +++ b/src/v2/records/types.ts @@ -1,6 +1,8 @@ import * as Flatfile from "../../api/index"; import { Sheet } from "../../serialization"; +export type Primitive = string | number | boolean | null | undefined; + interface SheetSearchParams { filter?: string; filterField?: string; @@ -119,6 +121,11 @@ interface JsonlRecordSpecialParams { * Record-level updated timestamp (when includeTimestamps=true) */ __u?: string; + + /** + * Link type identifier (used when this record represents a link) + */ + __x?: string; } export interface JsonlRecord extends JsonlRecordSpecialParams { diff --git a/src/v2/records/utils.ts b/src/v2/records/utils.ts new file mode 100644 index 00000000..5fa0af1d --- /dev/null +++ b/src/v2/records/utils.ts @@ -0,0 +1,58 @@ +/** + * Utility functions for type casting in FlatfileRecord + */ + +export function isPresent(value: any): boolean { + return value !== null && value !== undefined && value !== ""; +} + +export function asString(value: any): string { + if (value === null || value === undefined) { + return ""; + } + return String(value); +} + +export function asNullableString(value: any): string | null { + if (value === null || value === undefined || value === "") { + return null; + } + return String(value); +} + +export function asBool(value: any): boolean { + if (typeof value === "boolean") { + return value; + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + return lower === "true" || lower === "yes" || lower === "1"; + } + if (typeof value === "number") { + return value !== 0; + } + return Boolean(value); +} + +export function asNumber(value: any): number | null { + if (value === null || value === undefined || value === "") { + return null; + } + const num = Number(value); + return isNaN(num) ? null : num; +} + +export function asDate(value: any): Date | null { + if (value === null || value === undefined || value === "") { + return null; + } + if (value instanceof Date) { + return value; + } + const date = new Date(value); + return isNaN(date.getTime()) ? 
null : date; +} + +// Constants for hash generation +export const HASH_PROP_DELIM = "|"; +export const HASH_VALUE_DELIM = ":"; diff --git a/tests/bun/records.test.ts b/tests/bun/records.test.ts index f75aceb2..42682bba 100644 --- a/tests/bun/records.test.ts +++ b/tests/bun/records.test.ts @@ -1,6 +1,6 @@ import fs from "fs"; import { join } from "path"; -import { RecordsV2 } from "../../src/v2/records"; +import { RecordsV2, FlatfileRecord } from "../../src/v2/records"; import { Flatfile } from "../../src"; // Mock the global fetch function @@ -324,4 +324,701 @@ invalid json line expect(result).toEqual(mockResponse); }); }); + + describe("get", () => { + it("should fetch and return FlatfileRecord objects", async () => { + const fixtureData = getFixtureData(); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(fixtureData), + body: null, + }); + + const result = await recordsV2.get(sheetId, {}, defaultRequestOptions); + + expect(result).toHaveLength(21); + + // Check that results are FlatfileRecord instances + const firstRecord = result[0]; + expect(firstRecord).toBeInstanceOf(FlatfileRecord); + + // Test FlatfileRecord methods + expect(firstRecord.id).toBe("dev_rc_a5d2afda7dda4149afe51229e2674906"); + expect(firstRecord.sheetId).toBe("dev_sh_jVnmFCKg"); + expect(firstRecord.slug).toBe("contacts-pCZHI4"); + expect(firstRecord.str("firstname")).toBe("John"); + expect(firstRecord.str("lastname")).toBe("Smith [X]"); + expect(firstRecord.str("email")).toBe("john.smith@example.com"); + + // Test presence checks + expect(firstRecord.has("firstname")).toBe(true); + expect(firstRecord.has("nonexistent")).toBe(false); + + // Test type conversion + expect(firstRecord.defStr("firstname")).toBe("John"); + expect(firstRecord.str("nonexistent")).toBeNull(); + }); + + it("should handle empty response for get", async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(""), + body: null, + }); + + const result = await recordsV2.get(sheetId, {}, defaultRequestOptions); + expect(result).toEqual([]); + }); + + it("should skip malformed JSONL lines and return FlatfileRecord objects", async () => { + const malformedData = `{"__k":"valid1","name":"John"} +invalid json line +{"__k":"valid2","name":"Jane"}`; + + // Mock console.warn to avoid test output noise + const consoleSpy = jest.spyOn(console, "warn").mockImplementation(); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(malformedData), + body: null, + }); + + const result = await recordsV2.get(sheetId, {}, defaultRequestOptions); + + expect(result).toHaveLength(2); + expect(result[0]).toBeInstanceOf(FlatfileRecord); + expect(result[1]).toBeInstanceOf(FlatfileRecord); + expect(result[0].id).toBe("valid1"); + expect(result[1].id).toBe("valid2"); + expect(result[0].str("name")).toBe("John"); + expect(result[1].str("name")).toBe("Jane"); + + consoleSpy.mockRestore(); + }); + + it("should handle FlatfileRecord manipulation", async () => { + const testData = `{"__k":"test_id","name":"Test","email":"test@example.com"}`; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(testData), + body: null, + }); + + const result = await recordsV2.get(sheetId, {}, defaultRequestOptions); + const record = result[0]; + + // Test change tracking + expect(record.isDirty()).toBe(false); + record.set("newField", "newValue"); + expect(record.isDirty()).toBe(true); + 
expect(record.get("newField")).toBe("newValue"); + + // Test error handling + expect(record.hasError()).toBe(false); + record.err("email", "Invalid email"); + expect(record.hasError()).toBe(true); + expect(record.hasError("email")).toBe(true); + + // Test conversion back to JSON + const jsonRecord = record.toJSON(); + expect(jsonRecord.__k).toBe("test_id"); + expect(jsonRecord.name).toBe("Test"); + expect(jsonRecord.newField).toBe("newValue"); + }); + }); + + describe("getStreaming", () => { + it("should stream FlatfileRecord objects with ReadableStream", async () => { + const testData = `{"__k":"stream_test_id","name":"StreamTest"}`; + const encoder = new TextEncoder(); + const chunk = encoder.encode(testData); + + const mockStream = new ReadableStream({ + start(controller) { + controller.enqueue(chunk); + controller.close(); + }, + }); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + body: mockStream, + }); + + const results: FlatfileRecord[] = []; + for await (const record of recordsV2.getStreaming(sheetId, {}, defaultRequestOptions)) { + results.push(record); + } + + expect(results).toHaveLength(1); + const record = results[0]; + expect(record).toBeInstanceOf(FlatfileRecord); + expect(record.id).toBe("stream_test_id"); + expect(record.str("name")).toBe("StreamTest"); + + // Test FlatfileRecord methods work on streamed records + expect(record.has("name")).toBe(true); + expect(record.defStr("name")).toBe("StreamTest"); + }); + + it("should handle streaming without ReadableStream support for getStreaming", async () => { + const fixtureData = getFixtureData(); + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(fixtureData), + body: null, // No ReadableStream support + }); + + const results: FlatfileRecord[] = []; + for await (const record of recordsV2.getStreaming(sheetId, {}, defaultRequestOptions)) { + results.push(record); + } + + expect(results).toHaveLength(21); + const firstRecord = results[0]; + expect(firstRecord).toBeInstanceOf(FlatfileRecord); + expect(firstRecord.id).toBe("dev_rc_a5d2afda7dda4149afe51229e2674906"); + expect(firstRecord.str("firstname")).toBe("John"); + }); + + it("should handle multiple FlatfileRecord objects in stream", async () => { + const testData = `{"__k":"record1","name":"First"} +{"__k":"record2","name":"Second"} +{"__k":"record3","name":"Third"}`; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(testData), + body: null, + }); + + const results: FlatfileRecord[] = []; + let recordCount = 0; + for await (const record of recordsV2.getStreaming(sheetId, {}, defaultRequestOptions)) { + expect(record).toBeInstanceOf(FlatfileRecord); + results.push(record); + recordCount++; + + // Test each record individually as it streams + if (recordCount === 1) { + expect(record.id).toBe("record1"); + expect(record.str("name")).toBe("First"); + } else if (recordCount === 2) { + expect(record.id).toBe("record2"); + expect(record.str("name")).toBe("Second"); + } else if (recordCount === 3) { + expect(record.id).toBe("record3"); + expect(record.str("name")).toBe("Third"); + } + } + + expect(results).toHaveLength(3); + expect(recordCount).toBe(3); + }); + + it("should handle manipulation of streamed FlatfileRecord objects", async () => { + const testData = `{"__k":"manipulation_test","email":"invalid-email","age":"25"}`; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(testData), + body: null, + }); + + const results: 
FlatfileRecord[] = []; + for await (const record of recordsV2.getStreaming(sheetId, {}, defaultRequestOptions)) { + // Test type conversion on streamed record + expect(record.num("age")).toBe(25); + expect(record.str("email")).toBe("invalid-email"); + + // Test manipulation on streamed record + if (record.str("email") && !record.str("email")!.includes("@")) { + record.err("email", "Invalid email format"); + } + + record.set("processed", true); + results.push(record); + } + + const record = results[0]; + expect(record.hasError("email")).toBe(true); + expect(record.get("processed")).toBe(true); + expect(record.isDirty()).toBe(true); + }); + }); + + describe("write", () => { + it("should write FlatfileRecord objects with changesets", async () => { + const mockResponse = { success: true, created: 1, updated: 1 }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + // Create records with changes + const newRecord = new FlatfileRecord({ firstName: "John", lastName: "Doe" } as any); + newRecord.set("email", "john@example.com"); + + const existingRecord = new FlatfileRecord({ __k: "us_rc_123", firstName: "Jane" } as any); + existingRecord.set("lastName", "Smith"); + + const records = [newRecord, existingRecord]; + + const result = await recordsV2.write(records, { sheetId }, defaultRequestOptions); + + expect(mockFetch).toHaveBeenCalledTimes(1); + + const fetchCall = mockFetch.mock.calls[0]; + expect(fetchCall[0]).toContain("/v2-alpha/records.jsonl"); + expect(fetchCall[1].method).toBe("POST"); + expect(fetchCall[1].headers).toMatchObject({ + Authorization: "Bearer test-token", + "Content-Type": "application/jsonl", + "X-Sheet-Id": sheetId, + }); + + // Check the body contains changesets, not full records + const bodyLines = fetchCall[1].body.split("\n").filter((line: string) => line.trim()); + expect(bodyLines).toHaveLength(2); + + const firstChangeset = JSON.parse(bodyLines[0]); + expect(firstChangeset).toMatchObject({ + firstName: "John", + lastName: "Doe", + email: "john@example.com", + __s: sheetId, + }); + + const secondChangeset = JSON.parse(bodyLines[1]); + expect(secondChangeset).toMatchObject({ + __k: "us_rc_123", + lastName: "Smith", + __s: sheetId, + }); + + expect(result).toEqual(mockResponse); + + // Verify records are committed (changes cleared) + expect(newRecord.isDirty()).toBe(false); // Committed, no longer dirty + expect(existingRecord.isDirty()).toBe(false); // Has __k, changes cleared + }); + + it("should write full records when truncate is true", async () => { + const mockResponse = { success: true, created: 2, updated: 0 }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + const record1 = new FlatfileRecord({ __k: "us_rc_123", firstName: "John", lastName: "Doe" } as any); + const record2 = new FlatfileRecord({ __k: "us_rc_456", firstName: "Jane", lastName: "Smith" } as any); + + // Make some changes + record1.set("email", "john@example.com"); + record2.set("phone", "555-1234"); + + const records = [record1, record2]; + + const result = await recordsV2.write(records, { sheetId, truncate: true }, defaultRequestOptions); + + const fetchCall = mockFetch.mock.calls[0]; + const bodyLines = fetchCall[1].body.split("\n").filter((line: string) => line.trim()); + expect(bodyLines).toHaveLength(2); + + // With truncate=true, should get full records, not just changesets + const firstRecord = JSON.parse(bodyLines[0]); + 
expect(firstRecord).toMatchObject({ + __k: "us_rc_123", + firstName: "John", + lastName: "Doe", + email: "john@example.com", + __s: sheetId, + }); + + const secondRecord = JSON.parse(bodyLines[1]); + expect(secondRecord).toMatchObject({ + __k: "us_rc_456", + firstName: "Jane", + lastName: "Smith", + phone: "555-1234", + __s: sheetId, + }); + + expect(result).toEqual(mockResponse); + }); + + it("should throw error when no records provided", async () => { + await expect(recordsV2.write([], { sheetId }, defaultRequestOptions)).rejects.toThrow( + "No records provided to write." + ); + }); + + it("should throw error when no changes to write", async () => { + const record = new FlatfileRecord({ __k: "us_rc_123", firstName: "John" } as any); + // Don't make any changes - record is not dirty + + await expect(recordsV2.write([record], { sheetId }, defaultRequestOptions)).rejects.toThrow( + "No changes made to the records that would need to be written." + ); + }); + + it("should filter out deleted temporary records", async () => { + const mockResponse = { success: true, created: 1, updated: 0 }; + + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + const validRecord = new FlatfileRecord({ firstName: "John" } as any); + validRecord.set("email", "john@example.com"); + + const tempRecord = new FlatfileRecord({} as any); + (tempRecord as any)._tempId = "TEMP_123"; + tempRecord.delete(); + tempRecord.set("firstName", "Temp"); + + const records = [validRecord, tempRecord]; + + await recordsV2.write(records, { sheetId }, defaultRequestOptions); + + const fetchCall = mockFetch.mock.calls[0]; + const bodyLines = fetchCall[1].body.split("\n").filter((line: string) => line.trim()); + + // Should only have one record (temp deleted record filtered out) + expect(bodyLines).toHaveLength(1); + + const recordData = JSON.parse(bodyLines[0]); + expect(recordData).toMatchObject({ + firstName: "John", + email: "john@example.com", + }); + }); + }); + + describe("writeStreaming", () => { + it("should stream FlatfileRecord objects with changesets", async () => { + const mockResponse = { success: true, created: 3, updated: 0 }; + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + async function* generateRecords() { + for (let i = 1; i <= 3; i++) { + const record = new FlatfileRecord({ firstName: `User${i}` } as any); + record.set("email", `user${i}@example.com`); + record.set("processed", true); + yield record; + } + } + + const result = await recordsV2.writeStreaming(generateRecords(), { sheetId }, defaultRequestOptions); + + expect(mockFetch).toHaveBeenCalledTimes(1); + expect(result).toEqual(mockResponse); + + // Check that the body is a ReadableStream-like object + const fetchCall = mockFetch.mock.calls[0]; + expect(fetchCall[1].body).toBeDefined(); + }); + + it("should handle truncate mode in streaming", async () => { + const mockResponse = { success: true, created: 2, updated: 0 }; + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + async function* generateRecords() { + const record1 = new FlatfileRecord({ __k: "us_rc_123", firstName: "John", lastName: "Doe" } as any); + record1.set("email", "john@example.com"); + yield record1; + + const record2 = new FlatfileRecord({ __k: "us_rc_456", firstName: "Jane" } as any); + record2.set("lastName", "Smith"); + yield record2; + } + + 
const result = await recordsV2.writeStreaming( + generateRecords(), + { sheetId, truncate: true }, + defaultRequestOptions + ); + + expect(result).toEqual(mockResponse); + }); + + it("should handle empty stream when truncate is true", async () => { + const mockResponse = { success: true, created: 0, updated: 0 }; + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + async function* generateRecords() { + // Empty generator + return; + } + + const result = await recordsV2.writeStreaming( + generateRecords(), + { sheetId, truncate: true }, + defaultRequestOptions + ); + + expect(result).toEqual(mockResponse); + }); + + it("should throw error when no changes in streaming mode", async () => { + async function* generateRecords() { + // Yield records with no changes + yield new FlatfileRecord({ __k: "us_rc_123", firstName: "John" } as any); + yield new FlatfileRecord({ __k: "us_rc_456", firstName: "Jane" } as any); + } + + // The error should be thrown during the ReadableStream creation/processing + await expect( + recordsV2.writeStreaming(generateRecords(), { sheetId }, defaultRequestOptions) + ).rejects.toThrow(); // Just check that it throws, not the specific message + }); + + it("should filter deleted temporary records in streaming", async () => { + const mockResponse = { success: true, created: 1, updated: 0 }; + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + async function* generateRecords() { + // Valid record with changes + const validRecord = new FlatfileRecord({ firstName: "John" } as any); + validRecord.set("email", "john@example.com"); + yield validRecord; + + // Temporary deleted record - should be filtered out + const tempRecord = new FlatfileRecord({} as any); + (tempRecord as any)._tempId = "TEMP_456"; + tempRecord.delete(); + tempRecord.set("firstName", "TempUser"); + yield tempRecord; + } + + const result = await recordsV2.writeStreaming(generateRecords(), { sheetId }, defaultRequestOptions); + + expect(result).toEqual(mockResponse); + }); + + it("should commit all processed records after successful write", async () => { + const mockResponse = { success: true, created: 2, updated: 0 }; + mockFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + text: () => Promise.resolve(JSON.stringify(mockResponse)), + }); + + const processedRecords: FlatfileRecord[] = []; + + async function* generateRecords() { + const record1 = new FlatfileRecord({ firstName: "User1" } as any); + record1.set("email", "user1@example.com"); + processedRecords.push(record1); + yield record1; + + const record2 = new FlatfileRecord({ firstName: "User2" } as any); + record2.set("email", "user2@example.com"); + processedRecords.push(record2); + yield record2; + } + + // Verify records are dirty before write + const recordsArray: FlatfileRecord[] = []; + for await (const record of generateRecords()) { + recordsArray.push(record); + } + + expect(recordsArray[0].isDirty()).toBe(true); + expect(recordsArray[1].isDirty()).toBe(true); + + // Reset and do the actual write + async function* generateRecordsAgain() { + yield recordsArray[0]; + yield recordsArray[1]; + } + + await recordsV2.writeStreaming(generateRecordsAgain(), { sheetId }, defaultRequestOptions); + + // Verify records are committed after successful write (changes cleared) + expect(recordsArray[0].isDirty()).toBe(false); // Committed, no longer dirty + 
expect(recordsArray[1].isDirty()).toBe(false); // Committed, no longer dirty + }); + }); + + describe("FlatfileRecord changeset logic", () => { + it("should handle commit() behavior for new records", () => { + // New record should be dirty initially + const newRecord = new FlatfileRecord({ firstName: 'John' } as any); + expect(newRecord.isDirty()).toBe(true); + + // After commit, should no longer be dirty + newRecord.commit(); + expect(newRecord.isDirty()).toBe(false); + + // Making changes should make it dirty again + newRecord.set('lastName', 'Doe'); + expect(newRecord.isDirty()).toBe(true); + + // After commit, should be clean again + newRecord.commit(); + expect(newRecord.isDirty()).toBe(false); + }); + + it("should reset committed flag when any state is modified", () => { + const record = new FlatfileRecord({ firstName: 'John' } as any); + record.commit(); + expect(record.isDirty()).toBe(false); + + // Test set() + record.set('lastName', 'Doe'); + expect(record.isDirty()).toBe(true); + record.commit(); + expect(record.isDirty()).toBe(false); + + // Test err() + record.err('firstName', 'Invalid name'); + expect(record.isDirty()).toBe(true); + record.commit(); + expect(record.isDirty()).toBe(false); + + // Test warn() + record.warn('firstName', 'Warning message'); + expect(record.isDirty()).toBe(true); + record.commit(); + expect(record.isDirty()).toBe(false); + + // Test info() + record.info('firstName', 'Info message'); + expect(record.isDirty()).toBe(true); + record.commit(); + expect(record.isDirty()).toBe(false); + + // Test delete() + record.delete(); + expect(record.isDirty()).toBe(true); + }); + + it("should manually mark record as dirty with setDirty()", () => { + // Test with new record + const newRecord = new FlatfileRecord({ firstName: 'John', lastName: 'Doe' } as any); + newRecord.commit(); + expect(newRecord.isDirty()).toBe(false); + + newRecord.setDirty(); + expect(newRecord.isDirty()).toBe(true); + + // Verify changeset includes all data for new record + const newChangeset = newRecord.changeset(); + expect(newChangeset).toEqual({ firstName: 'John', lastName: 'Doe' }); + + // Test with existing record (has __k) + const existingRecord = new FlatfileRecord({ __k: '123', firstName: 'Jane', age: 30 } as any); + existingRecord.commit(); + expect(existingRecord.isDirty()).toBe(false); + + existingRecord.setDirty(); + expect(existingRecord.isDirty()).toBe(true); + + // Verify changeset includes all non-system fields for existing record + const existingChangeset = existingRecord.changeset(); + expect(existingChangeset).toEqual({ __k: '123', firstName: 'Jane', age: 30 }); + + // Test that setDirty returns this for chaining + const chainResult = existingRecord.setDirty(); + expect(chainResult).toBe(existingRecord); + }); + + it("should handle new vs existing record changesets correctly", () => { + // New record (no __k) - changeset should include all data + const newRecord = new FlatfileRecord({ + firstName: 'John', + lastName: 'Doe' + } as any); + newRecord.set('email', 'john@example.com'); + + const newChangeset = newRecord.changeset(); + expect(newChangeset).toMatchObject({ + firstName: 'John', + lastName: 'Doe', + email: 'john@example.com' + }); + + // Existing record (with __k) - changeset should only include changes + const existingRecord = new FlatfileRecord({ + __k: 'us_rc_123', + firstName: 'Jane', + lastName: 'Smith', + email: 'jane@example.com' + } as any); + existingRecord.set('email', 'jane.smith@newdomain.com'); + + const existingChangeset = 
existingRecord.changeset(); + expect(existingChangeset).toMatchObject({ + __k: 'us_rc_123', + email: 'jane.smith@newdomain.com' + }); + expect(existingChangeset.firstName).toBeUndefined(); + expect(existingChangeset.lastName).toBeUndefined(); + }); + + it("should have consistent isDirty and changeset behavior", () => { + // New record should be dirty (has data to write) + const newRecord = new FlatfileRecord({ + firstName: 'John', + lastName: 'Doe' + } as any); + + expect(newRecord.isDirty()).toBe(true); // New records are always dirty + const newChangeset = newRecord.changeset(); + expect(Object.keys(newChangeset).length).toBeGreaterThan(0); // Has data + + // Existing record with no changes should not be dirty + const existingRecord = new FlatfileRecord({ + __k: 'us_rc_123', + firstName: 'Jane' + } as any); + + expect(existingRecord.isDirty()).toBe(false); // No changes made + + // But after making changes, should be dirty + existingRecord.set('email', 'jane@example.com'); + expect(existingRecord.isDirty()).toBe(true); // Now has changes + + const existingChangeset = existingRecord.changeset(); + expect(existingChangeset).toMatchObject({ + __k: 'us_rc_123', + email: 'jane@example.com' + }); + }); + }); });