@@ -124,48 +124,3 @@ export const connectMongoForTests = (url: string, isCI: boolean) => {
   });
   return new PowerSyncMongo(client);
 };
-
-/**
- * MongoDB bulkWrite internally splits the operations into batches
- * so that no batch exceeds 16MB. However, there are cases where
- * the batch size is very close to 16MB, where additional metadata
- * on the server pushes it over the limit, resulting in this error
- * from the server:
- *
- * > MongoBulkWriteError: BSONObj size: 16814023 (0x1008FC7) is invalid. Size must be between 0 and 16793600(16MB) First element: insert: "bucket_data"
- *
- * We work around the issue by doing our own batching, limiting the
- * batch size to 15MB. This does add additional overhead with
- * BSON.calculateObjectSize.
- */
-export async function safeBulkWrite<T extends mongo.Document>(
-  collection: mongo.Collection<T>,
-  operations: mongo.AnyBulkWriteOperation<T>[],
-  options: mongo.BulkWriteOptions
-) {
-  // Must be below 16MB.
-  // We could probably go a little closer, but 15MB is a safe threshold.
-  const BULK_WRITE_LIMIT = 15 * 1024 * 1024;
-
-  let batch: mongo.AnyBulkWriteOperation<T>[] = [];
-  let currentSize = 0;
-  // Estimated overhead per operation, should be smaller in reality.
-  const keySize = 8;
-  for (let op of operations) {
-    const bsonSize =
-      mongo.BSON.calculateObjectSize(op, {
-        checkKeys: false,
-        ignoreUndefined: true
-      } as any) + keySize;
-    if (batch.length > 0 && currentSize + bsonSize > BULK_WRITE_LIMIT) {
-      await collection.bulkWrite(batch, options);
-      currentSize = 0;
-      batch = [];
-    }
-    batch.push(op);
-    currentSize += bsonSize;
-  }
-  if (batch.length > 0) {
-    await collection.bulkWrite(batch, options);
-  }
-}
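
For context, a minimal usage sketch of the removed helper. The `bucket_data` collection name comes from the error message quoted above; the document shape, connection URL, module path, and `writeBuckets` function are hypothetical, for illustration only:

import * as mongo from 'mongodb';
import { safeBulkWrite } from './util'; // hypothetical path to the helper shown above

interface BucketDoc extends mongo.Document {
  _id: string;
  data: string;
}

async function writeBuckets(rows: { id: string; data: string }[]) {
  const client = new mongo.MongoClient('mongodb://localhost:27017');
  await client.connect();
  const collection = client.db('powersync').collection<BucketDoc>('bucket_data');

  // Build one replaceOne-with-upsert operation per row. With a large
  // `rows` array, the combined BSON size can far exceed 16MB.
  const operations: mongo.AnyBulkWriteOperation<BucketDoc>[] = rows.map((row) => ({
    replaceOne: {
      filter: { _id: row.id },
      replacement: { data: row.data },
      upsert: true
    }
  }));

  // safeBulkWrite issues one bulkWrite call per sub-batch, keeping each
  // sub-batch under the 15MB threshold, so the server's 16MB BSONObj
  // limit is never hit regardless of the total size of `operations`.
  await safeBulkWrite(collection, operations, { ordered: true });
  await client.close();
}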