Commit 79abf05

Implement batching during initial sync
1 parent ffa68d1 commit 79abf05

File tree

2 files changed: +50 −18 lines changed

packages/powersync-db-collection/src/definitions.ts

Lines changed: 18 additions & 0 deletions
@@ -46,6 +46,19 @@ export type PowerSyncCollectionConfig<
   tableName: string
   /** The PowerSync database instance */
   database: AbstractPowerSyncDatabase
+  /**
+   * The maximum number of documents to read from the SQLite table
+   * in a single batch during the initial sync between PowerSync and the
+   * in-memory TanStack DB collection.
+   *
+   * @remarks
+   * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified.
+   * - Larger values reduce the number of round trips to the storage
+   *   engine but increase memory usage per batch.
+   * - Smaller values may lower memory usage and allow earlier
+   *   streaming of initial results, at the cost of more query calls.
+   */
+  syncBatchSize?: number
 }
 
 export type PowerSyncCollectionMeta = {
@@ -71,3 +84,8 @@ export type EnhancedPowerSyncCollectionConfig<
 export type PowerSyncCollectionUtils = {
   getMeta: () => PowerSyncCollectionMeta
 }
+
+/**
+ * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}
+ */
+export const DEFAULT_BATCH_SIZE = 1000
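For orientation, the new option is passed alongside the existing database and tableName fields. A minimal consumer sketch follows; it is not part of the commit, the package specifier and table name are assumptions, the use of createCollection from @tanstack/db is inferred rather than shown in the diff, and other required collection options are omitted.

// Usage sketch only; identifiers not present in the diff are assumed.
import { createCollection } from "@tanstack/db"
// Package specifier guessed from the repo path packages/powersync-db-collection.
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"
import type { AbstractPowerSyncDatabase } from "@powersync/common"

declare const database: AbstractPowerSyncDatabase // opened elsewhere in the app

const documents = createCollection(
  powerSyncCollectionOptions({
    database,
    tableName: `documents`,
    // Read 500 rows per query during the initial SQLite -> collection sync;
    // omit to fall back to DEFAULT_BATCH_SIZE (1000).
    syncBatchSize: 500,
    // ...schema and any other collection options would go here
  })
)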

packages/powersync-db-collection/src/powersync.ts

Lines changed: 32 additions & 18 deletions
@@ -1,20 +1,21 @@
-import { DiffTriggerOperation } from "@powersync/common"
+import { DiffTriggerOperation, sanitizeSQL } from "@powersync/common"
+import { DEFAULT_BATCH_SIZE } from "./definitions"
 import { asPowerSyncRecord, mapOperation } from "./helpers"
 import { PendingOperationStore } from "./PendingOperationStore"
 import { PowerSyncTransactor } from "./PowerSyncTransactor"
-import type { TriggerDiffRecord } from "@powersync/common"
-import type { StandardSchemaV1 } from "@standard-schema/spec"
-import type {
-  CollectionConfig,
-  InferSchemaOutput,
-  SyncConfig,
-} from "@tanstack/db"
 import type {
   EnhancedPowerSyncCollectionConfig,
   PowerSyncCollectionConfig,
   PowerSyncCollectionUtils,
 } from "./definitions"
 import type { PendingOperation } from "./PendingOperationStore"
+import type {
+  CollectionConfig,
+  InferSchemaOutput,
+  SyncConfig,
+} from "@tanstack/db"
+import type { StandardSchemaV1 } from "@standard-schema/spec"
+import type { TriggerDiffRecord } from "@powersync/common"
 
 /**
  * Creates PowerSync collection options for use with a standard Collection
@@ -100,7 +101,12 @@ export function powerSyncCollectionOptions<
 >(
   config: PowerSyncCollectionConfig<T, TSchema>
 ): EnhancedPowerSyncCollectionConfig<T, TSchema> {
-  const { database, tableName, ...restConfig } = config
+  const {
+    database,
+    tableName,
+    syncBatchSize = DEFAULT_BATCH_SIZE,
+    ...restConfig
+  } = config
 
   /**
    * The onInsert, onUpdate, onDelete handlers should only return
@@ -202,16 +208,24 @@
       },
       hooks: {
         beforeCreate: async (context) => {
-          begin()
-          for (const row of await context.getAll<T>(
-            `SELECT * FROM ${tableName}`
-          )) {
-            write({
-              type: `insert`,
-              value: row,
-            })
+          let currentBatchCount = syncBatchSize
+          let cursor = 0
+          while (currentBatchCount == syncBatchSize) {
+            begin()
+            const batchItems = await context.getAll<T>(
+              sanitizeSQL`SELECT * FROM ${tableName} LIMIT ? OFFSET ?`,
+              [syncBatchSize, cursor]
+            )
+            currentBatchCount = batchItems.length
+            cursor += currentBatchCount
+            for (const row of batchItems) {
+              write({
+                type: `insert`,
+                value: row,
+              })
+            }
+            commit()
           }
-          commit()
           markReady()
           database.logger.info(`Sync is ready`)
         },
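The beforeCreate hook now pages through the SQLite table with LIMIT/OFFSET, wrapping each page in its own begin()/commit() so earlier rows can be surfaced before the whole table has been read, and it stops once a page comes back shorter than syncBatchSize (a table whose row count is an exact multiple of the batch size therefore issues one final short query). Isolated from the collection plumbing, the same control flow looks roughly like the sketch below; the helper, its parameters, and the callback are illustrative stand-ins, and unlike the commit it does not run the table name through sanitizeSQL.

// Standalone sketch of the paging pattern used in beforeCreate above.
// Only the control flow mirrors the commit; all names here are illustrative.
async function readInBatches<T>(
  getAll: (sql: string, params: Array<unknown>) => Promise<Array<T>>,
  table: string,
  batchSize: number,
  onBatch: (rows: Array<T>) => void
): Promise<void> {
  let lastCount = batchSize
  let offset = 0
  // Keep fetching while full pages come back; a short (or empty) page
  // means the table has been exhausted.
  while (lastCount === batchSize) {
    const rows = await getAll(
      `SELECT * FROM ${table} LIMIT ? OFFSET ?`, // the commit sanitizes ${tableName}
      [batchSize, offset]
    )
    lastCount = rows.length
    offset += lastCount
    onBatch(rows) // in the commit: begin(), write(...) per row, commit()
  }
}

Because markReady() still runs only after the loop exits, readiness continues to mean the full initial snapshot has been written; the per-batch commits only make intermediate results visible sooner, as the new syncBatchSize docs describe.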
