
Commit a9c12a2

waleedlatif1 and claude authored
feat(tables): add export, import column creation, infinite row pagination (#4373)
* feat(tables): add export, import column creation, infinite row pagination

- Add `/api/table/[tableId]/export` route streaming CSV/JSON downloads
- Rename `/import-csv` route to `/import` and extend it to auto-create new columns from unmapped CSV headers via a `createColumns` form field
- Switch the table view to `useInfiniteQuery` so tables larger than 1000 rows fully load; reconcile created rows into the paginated cache so "New row" past 1000 no longer reverts on invalidate
- Wire scroll-driven prefetch (600px from the bottom) and pre-drain pages before append to keep the new row's position consistent
- Polish the import-csv dialog flow and add an Export action to the header

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>

* fix(table): route boundary validation through Zod contracts

Switch the table query hooks and the import/export routes to the codebase's contract-based request pattern so the API validation audit and boundary policy ratchet pass.

- `hooks/queries/tables.ts` now calls every endpoint via `requestJson(contract, ...)`; the only remaining raw `fetch` calls are the streaming export download and the multipart CSV upload, both annotated `boundary-raw-fetch:`.
- The import and export routes parse `params`, the `format` query, and the multipart form fields with shared schemas from `@/lib/api/contracts/tables`. Two new contract schemas (`csvImportCreateColumnsSchema`, `tableExportFormatSchema`) cover the fields specific to these routes.
- Bumps the audit baseline by one route to account for the new export endpoint.

* fix(table): address PR bot review

- Move the `isAppendingRowRef` reset into the create-row mutation's `onSettled` callback. The previous `try/finally` cleared the guard immediately after `mutate()` returned, before the request completed, so a rapid second click on "New row" could fire a duplicate create.
- Drop the unused `addTableColumns` wrapper from `lib/table/service`. The CSV import flow only ever uses the transaction-bound `addTableColumnsWithTx`; the standalone wrapper was dead code.

* fix(table): re-throw on infinite-query fetch error in append-row drain

`useInfiniteQuery.fetchNextPage()` resolves (rather than rejects) when a page request fails: the resolved value carries `status: 'error'` while `hasNextPage` still reflects the last successful page. The drain loop in `handleAppendRow` relied on a thrown error to bail, so a failed mid-drain fetch could spin indefinitely and leave the append guard stuck on. Re-throw inside `fetchNextPageWrapped` when the result is an error so the caller's `try/catch` runs as intended.

* fix(table): import TABLE_LIMITS from constants to keep server code out of the client bundle

The client hook `use-table-data.ts` was importing `TABLE_LIMITS` as a value from the `@/lib/table` barrel, which transitively pulls in `service.ts` and the `postgres` driver. Turbopack then tried to bundle `fs`, `net`, `tls`, and `perf_hooks` into the client component graph, and the production build failed. Import `TABLE_LIMITS` directly from `@/lib/table/constants` (a pure constants module) and keep the type imports against the barrel.

* fix(table): run batch unique check inside the import transaction

`checkBatchUniqueConstraintsDb` queried the global `db` connection, so inside a single import transaction (one tx wrapping all batches) the constraint lookup couldn't see uncommitted rows from prior batches; duplicates that crossed `CSV_MAX_BATCH_SIZE` boundaries slipped through. Accept an optional executor and pass `trx` from `batchInsertRowsWithTx` so the lookup observes the in-flight transaction state.

---------

Co-authored-by: Claude Opus 4.7 <noreply@anthropic.com>
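The contract-based request pattern in the second commit can be sketched as follows; the contract shape and the `requestJson` signature are assumptions modeled on the message, not the repo's actual code.

import { z } from 'zod'

// Hypothetical contract: pairs an endpoint path with the Zod schema its
// response must satisfy.
const tableRowsContract = {
  path: (tableId: string) => `/api/table/${tableId}`,
  response: z.object({
    rows: z.array(z.record(z.unknown())),
  }),
}

// Generic boundary helper: every JSON response is parsed against the
// contract schema before it reaches the query hooks.
async function requestJson<S extends z.ZodTypeAny>(
  contract: { path: (id: string) => string; response: S },
  id: string
): Promise<z.infer<S>> {
  const res = await fetch(contract.path(id))
  if (!res.ok) throw new Error(`Request failed with status ${res.status}`)
  return contract.response.parse(await res.json())
}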
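The `onSettled` fix from the PR-bot review reads roughly like the sketch below, assuming TanStack Query's `useMutation`; `postRow` and the hook name are hypothetical stand-ins.

import { useRef } from 'react'
import { useMutation } from '@tanstack/react-query'

// Hypothetical API call standing in for the real create-row request.
declare function postRow(row: Record<string, unknown>): Promise<void>

function useAppendRow() {
  const isAppendingRowRef = useRef(false)

  const createRow = useMutation({
    mutationFn: postRow,
    onSettled: () => {
      // Fires after the request completes (success or error), unlike a
      // try/finally around mutate(), which returns immediately.
      isAppendingRowRef.current = false
    },
  })

  return (row: Record<string, unknown>) => {
    if (isAppendingRowRef.current) return // drop a rapid second click
    isAppendingRowRef.current = true
    createRow.mutate(row)
  }
}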
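The re-throw fix can be illustrated with a structurally typed sketch that stands alone; only `fetchNextPageWrapped` is named in the commit, the rest is illustrative.

type FetchResult = { status: 'error' | 'success' | 'pending'; error: unknown }

async function fetchNextPageWrapped(
  fetchNextPage: () => Promise<FetchResult>
): Promise<FetchResult> {
  const result = await fetchNextPage()
  // fetchNextPage resolves even when the page request fails, so convert the
  // error status into a rejection the caller's try/catch can observe.
  if (result.status === 'error') {
    throw result.error instanceof Error ? result.error : new Error('Failed to fetch next page')
  }
  return result
}

async function drainRemainingPages(
  hasNextPage: () => boolean,
  fetchNextPage: () => Promise<FetchResult>
): Promise<void> {
  while (hasNextPage()) {
    // A failed mid-drain fetch now throws here instead of spinning forever.
    await fetchNextPageWrapped(fetchNextPage)
  }
}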
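The bundling fix is a one-line import change; the type name below is a hypothetical placeholder, only `TABLE_LIMITS` and the module paths come from the commit.

// Before: a value import from the barrel dragged service.ts and the
// postgres driver into the client component graph.
// import { TABLE_LIMITS } from '@/lib/table'

// After: values come from the pure constants module; type-only imports can
// stay on the barrel because they are erased at compile time.
import { TABLE_LIMITS } from '@/lib/table/constants'
import type { TableRow } from '@/lib/table' // hypothetical type name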
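The optional-executor fix can be sketched as below, under the assumption of a client whose transaction handle shares the root query interface; apart from `checkBatchUniqueConstraintsDb`, `batchInsertRowsWithTx`, and `trx`, the names and SQL are illustrative.

interface Executor {
  query(sql: string, params: unknown[]): Promise<{ rows: { value: string }[] }>
}

interface Db extends Executor {
  transaction<T>(fn: (trx: Executor) => Promise<T>): Promise<T>
}

declare const db: Db // stand-in for the global connection

async function checkBatchUniqueConstraintsDb(
  values: string[],
  executor: Executor = db // optional executor; the default keeps old callers working
): Promise<string[]> {
  const { rows } = await executor.query(
    'SELECT value FROM table_rows WHERE value = ANY($1)',
    [values]
  )
  return rows.map((r) => r.value) // values that already exist
}

async function batchInsertRowsWithTx(batches: string[][]): Promise<void> {
  await db.transaction(async (trx) => {
    for (const batch of batches) {
      // On the global db this lookup cannot see rows inserted by earlier
      // batches of the same uncommitted transaction; on trx it can.
      const duplicates = await checkBatchUniqueConstraintsDb(batch, trx)
      if (duplicates.length > 0) {
        throw new Error(`Duplicate values across batches: ${duplicates.join(', ')}`)
      }
      await trx.query('INSERT INTO table_rows (value) SELECT unnest($1::text[])', [batch])
    }
  })
}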
1 parent 47208e0 · commit a9c12a2

16 files changed: 1142 additions & 267 deletions


Lines changed: 131 additions & 0 deletions
@@ -0,0 +1,131 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { tableExportFormatSchema, tableIdParamsSchema } from '@/lib/api/contracts/tables'
import { getValidationErrorMessage } from '@/lib/api/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
import { queryRows } from '@/lib/table/service'
import { accessError, checkAccess } from '@/app/api/table/utils'

const logger = createLogger('TableExport')

const EXPORT_BATCH_SIZE = 1000

type ExportFormat = 'csv' | 'json'

interface RouteParams {
  params: Promise<{ tableId: string }>
}

/** GET /api/table/[tableId]/export - Streams the full table contents as CSV or JSON. */
export const GET = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => {
  const requestId = generateRequestId()
  const { tableId } = tableIdParamsSchema.parse(await params)

  const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
  if (!auth.success || !auth.userId) {
    return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
  }

  const { searchParams } = new URL(request.url)
  const formatValidation = tableExportFormatSchema.safeParse(
    searchParams.get('format') ?? undefined
  )
  if (!formatValidation.success) {
    return NextResponse.json(
      { error: getValidationErrorMessage(formatValidation.error) },
      { status: 400 }
    )
  }
  const format: ExportFormat = formatValidation.data

  const access = await checkAccess(tableId, auth.userId, 'read')
  if (!access.ok) return accessError(access, requestId, tableId)
  const { table } = access

  const columns = table.schema.columns
  const safeName = sanitizeFilename(table.name)
  const filename = `${safeName}.${format}`

  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
      const encoder = new TextEncoder()
      try {
        if (format === 'csv') {
          controller.enqueue(encoder.encode(`${toCsvRow(columns.map((c) => c.name))}\n`))
        } else {
          controller.enqueue(encoder.encode('['))
        }

        let offset = 0
        let firstJsonRow = true
        while (true) {
          const result = await queryRows(
            tableId,
            table.workspaceId,
            { limit: EXPORT_BATCH_SIZE, offset, includeTotal: false },
            requestId
          )

          for (const row of result.rows) {
            if (format === 'csv') {
              const values = columns.map((c) => formatCsvValue(row.data[c.name]))
              controller.enqueue(encoder.encode(`${toCsvRow(values)}\n`))
            } else {
              const prefix = firstJsonRow ? '' : ','
              firstJsonRow = false
              controller.enqueue(encoder.encode(prefix + JSON.stringify({ ...row.data })))
            }
          }

          if (result.rows.length < EXPORT_BATCH_SIZE) break
          offset += result.rows.length
        }

        if (format === 'json') controller.enqueue(encoder.encode(']'))
        controller.close()

        logger.info(`[${requestId}] Exported table ${tableId}`, {
          format,
          rowCount: table.rowCount,
        })
      } catch (err) {
        logger.error(`[${requestId}] Export failed for table ${tableId}`, err)
        controller.error(err)
      }
    },
  })

  return new NextResponse(stream, {
    status: 200,
    headers: {
      'Content-Type': format === 'csv' ? 'text/csv; charset=utf-8' : 'application/json',
      'Content-Disposition': `attachment; filename="${filename}"`,
      'Cache-Control': 'no-store',
    },
  })
})

function sanitizeFilename(name: string): string {
  const cleaned = name.replace(/[^a-zA-Z0-9_-]+/g, '_').replace(/^_+|_+$/g, '')
  return cleaned || 'table'
}

function formatCsvValue(value: unknown): string {
  if (value === null || value === undefined) return ''
  if (value instanceof Date) return value.toISOString()
  if (typeof value === 'object') return JSON.stringify(value)
  return String(value)
}

function toCsvRow(values: string[]): string {
  return values.map(escapeCsvField).join(',')
}

function escapeCsvField(field: string): string {
  if (/[",\n\r]/.test(field)) {
    return `"${field.replace(/"/g, '""')}"`
  }
  return field
}
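For reference, `escapeCsvField` applies the usual RFC 4180 quoting rules; a few sample calls:

escapeCsvField('plain')       // plain          (no special characters)
escapeCsvField('a,b')         // "a,b"          (comma forces quoting)
escapeCsvField('say "hi"')    // "say ""hi"""   (inner quotes doubled)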
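On the client this endpoint is consumed with a raw streaming fetch (the `boundary-raw-fetch:` exception noted in the commit message); the download plumbing below is an illustrative sketch, not the repo's hook.

async function downloadTableExport(tableId: string, format: 'csv' | 'json'): Promise<void> {
  // boundary-raw-fetch: streaming file download, deliberately not a JSON contract call
  const res = await fetch(`/api/table/${tableId}/export?format=${format}`)
  if (!res.ok) throw new Error(`Export failed with status ${res.status}`)
  const blob = await res.blob()
  const url = URL.createObjectURL(blob)
  const a = document.createElement('a')
  a.href = url
  a.download = `table.${format}` // the server also sets Content-Disposition
  a.click()
  URL.revokeObjectURL(url)
}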
