diff --git a/lib/src/http_request.dart b/lib/src/http_request.dart
index f42a26b5..02d572af 100644
--- a/lib/src/http_request.dart
+++ b/lib/src/http_request.dart
@@ -29,6 +29,7 @@ abstract class HttpRequest {
     String path, {
     Object? data,
     Map<String, dynamic>? queryParameters,
+    String contentType,
   });
 
   /// POST method
@@ -36,6 +37,7 @@ abstract class HttpRequest {
     String path, {
     Object? data,
     Map<String, dynamic>? queryParameters,
+    String contentType,
   });
 
   /// PUT method
@@ -43,6 +45,7 @@ abstract class HttpRequest {
     String path, {
     Object? data,
     Map<String, dynamic>? queryParameters,
+    String contentType,
   });
 
   /// DELETE method
diff --git a/lib/src/http_request_impl.dart b/lib/src/http_request_impl.dart
index 09106d77..3f5d8f99 100644
--- a/lib/src/http_request_impl.dart
+++ b/lib/src/http_request_impl.dart
@@ -17,7 +17,6 @@ class HttpRequestImpl implements HttpRequest {
             if (_kIsWeb) Version.qualifiedVersionWeb
           ].join(',')
         },
-        contentType: 'application/json',
         responseType: ResponseType.json,
         connectTimeout: connectTimeout ?? Duration(seconds: 5),
       ),
@@ -61,12 +60,16 @@ class HttpRequestImpl implements HttpRequest {
     String path, {
     Object? data,
     Map<String, dynamic>? queryParameters,
+    String contentType = Headers.jsonContentType,
   }) async {
     try {
       return await dio.post(
         path,
         data: data,
         queryParameters: queryParameters,
+        options: Options(
+          contentType: contentType,
+        ),
       );
     } on DioError catch (e) {
       return throwException(e);
@@ -78,12 +81,16 @@ class HttpRequestImpl implements HttpRequest {
     String path, {
     Object? data,
     Map<String, dynamic>? queryParameters,
+    String contentType = Headers.jsonContentType,
   }) async {
     try {
       return await dio.patch(
         path,
         data: data,
         queryParameters: queryParameters,
+        options: Options(
+          contentType: contentType,
+        ),
       );
     } on DioError catch (e) {
       return throwException(e);
@@ -95,12 +102,16 @@ class HttpRequestImpl implements HttpRequest {
     String path, {
     Object? data,
     Map<String, dynamic>? queryParameters,
+    String contentType = Headers.jsonContentType,
   }) async {
     try {
       return await dio.put(
         path,
         data: data,
         queryParameters: queryParameters,
+        options: Options(
+          contentType: contentType,
+        ),
       );
     } on DioError catch (e) {
       return throwException(e);
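The change above drops the client-wide `contentType` from dio's `BaseOptions` so that each request can choose its own via `Options`. A minimal standalone sketch of that dio pattern (the base URL and index path are placeholders, not values from this PR):

```dart
import 'package:dio/dio.dart';

Future<void> main() async {
  final dio = Dio(BaseOptions(baseUrl: 'http://localhost:7700'));

  // JSON payloads keep the familiar default content type...
  await dio.post(
    '/indexes/books/documents',
    data: '[{"id": 1, "title": "Alice In Wonderland"}]',
    options: Options(contentType: Headers.jsonContentType),
  );

  // ...while raw CSV overrides it for this call only.
  await dio.put(
    '/indexes/books/documents',
    data: 'id:number,title\n2,"Le Petit Prince"',
    options: Options(contentType: 'text/csv'),
  );
}
```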
diff --git a/lib/src/index.dart b/lib/src/index.dart
index d1b0bf40..7268cf0a 100644
--- a/lib/src/index.dart
+++ b/lib/src/index.dart
@@ -55,36 +55,161 @@ abstract class MeiliSearchIndex {
   /// Return a list of all existing documents in the index.
   Future<Result<Map<String, dynamic>>> getDocuments({DocumentsQuery? params});
 
+  /// {@template meili.add_docs}
   /// Add a list of documents by given [documents] and optional [primaryKey] parameter.
-  /// If index is not exists tries to create a new index and adds documents.
+  /// {@endtemplate}
+  ///
+  /// {@template meili.index_upsert}
+  /// If the index does not exist, tries to create a new index and add the documents.
+  /// {@endtemplate}
   Future<Task> addDocuments(
     List<Map<String, dynamic>> documents, {
     String? primaryKey,
   });
 
+  /// {@macro meili.add_docs}
+  ///
+  /// * The passed [documents] must be a valid JSON string representing an array of objects.
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<Task> addDocumentsJson(
+    String documents, {
+    String? primaryKey,
+  });
+
+  /// {@macro meili.add_docs}
+  ///
+  /// *
+  /// {@template meili.csv}
+  /// The passed documents must be a valid CSV string, where the first line contains the objects' keys (and optionally their types), and each subsequent line corresponds to an object.
+  /// [See the relevant documentation](https://docs.meilisearch.com/learn/core_concepts/documents.html#csv)
+  /// {@endtemplate}
+  ///
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<Task> addDocumentsCsv(
+    String documents, {
+    String? primaryKey,
+  });
+
+  /// {@macro meili.add_docs}
+  ///
+  /// * The passed [documents] must be a valid Newline Delimited JSON (NDJSON) string, where each line corresponds to an object.
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<Task> addDocumentsNdjson(
+    String documents, {
+    String? primaryKey,
+  });
+
+  /// {@template meili.add_docs_batches}
   /// Add a list of documents in batches of size [batchSize] by given [documents] and optional [primaryKey] parameter.
-  /// If the index does not exist try to create a new index and add documents.
+  /// {@endtemplate}
+  ///
+  /// {@macro meili.index_upsert}
   Future<List<Task>> addDocumentsInBatches(
     List<Map<String, dynamic>> documents, {
     int batchSize = 1000,
     String? primaryKey,
   });
 
+  /// {@macro meili.add_docs_batches}
+  ///
+  /// *
+  /// {@macro meili.csv}
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<List<Task>> addDocumentsCsvInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  });
+
+  /// {@macro meili.add_docs_batches}
+  ///
+  /// * The passed [documents] must be a valid Newline Delimited JSON (NDJSON) string, where each line corresponds to an object.
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<List<Task>> addDocumentsNdjsonInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  });
+
+  /// {@template meili.update_docs}
   /// Add a list of documents or update them if they already exist by given [documents] and optional [primaryKey] parameter.
-  /// If index is not exists tries to create a new index and adds documents.
+  /// {@endtemplate}
+  ///
+  /// {@macro meili.index_upsert}
   Future<Task> updateDocuments(
     List<Map<String, dynamic>> documents, {
     String? primaryKey,
   });
 
+  /// {@macro meili.update_docs}
+  ///
+  /// * The passed [documents] must be a valid JSON string representing an array of objects.
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<Task> updateDocumentsJson(
+    String documents, {
+    String? primaryKey,
+  });
+
+  /// {@macro meili.update_docs}
+  ///
+  /// * The passed [documents] must be a valid Newline Delimited JSON (NDJSON) string, where each line corresponds to an object.
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<Task> updateDocumentsNdjson(
+    String documents, {
+    String? primaryKey,
+  });
+
+  /// {@macro meili.update_docs}
+  ///
+  /// *
+  /// {@macro meili.csv}
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<Task> updateDocumentsCsv(
+    String documents, {
+    String? primaryKey,
+  });
+
+  /// {@template meili.update_docs_batches}
   /// Add a list of documents or update them if they already exist in batches of size [batchSize] by given [documents] and optional [primaryKey] parameter.
-  /// If index is not exists tries to create a new index and adds documents.
+  /// {@endtemplate}
+  ///
+  /// {@macro meili.index_upsert}
   Future<List<Task>> updateDocumentsInBatches(
     List<Map<String, dynamic>> documents, {
     int batchSize = 1000,
     String? primaryKey,
   });
 
+  /// {@macro meili.update_docs_batches}
+  ///
+  /// *
+  /// {@macro meili.csv}
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<List<Task>> updateDocumentsCsvInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  });
+
+  /// {@macro meili.update_docs_batches}
+  ///
+  /// * The passed [documents] must be a valid Newline Delimited JSON (NDJSON) string, where each line corresponds to an object.
+  /// *
+  /// {@macro meili.index_upsert}
+  Future<List<Task>> updateDocumentsNdjsonInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  });
+
   /// Delete one document by given [id].
   Future<Task> deleteDocument(Object id);
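Taken together, the new API surface might be exercised like this — a hedged sketch, assuming a running Meilisearch instance; the URL, key, and index name are placeholders:

```dart
import 'package:meilisearch/meilisearch.dart';

Future<void> main() async {
  final client = MeiliSearchClient('http://localhost:7700', 'masterKey');
  final index = client.index('books');

  // Raw JSON: a string holding an array of documents.
  await index.addDocumentsJson(
    '[{"book_id": 1, "title": "Alice In Wonderland"}]',
    primaryKey: 'book_id',
  );

  // CSV: the first line declares keys (with optional types),
  // then one document per line.
  await index.addDocumentsCsv('book_id:number,title\n2,"Le Petit Prince"');

  // NDJSON: one JSON object per line.
  await index.addDocumentsNdjson(
    '{"book_id": 3, "title": "The Hobbit"}\n{"book_id": 4, "title": "Emma"}',
  );
}
```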
diff --git a/lib/src/index_impl.dart b/lib/src/index_impl.dart
index 94a2c49b..b0871ae8 100644
--- a/lib/src/index_impl.dart
+++ b/lib/src/index_impl.dart
@@ -1,19 +1,24 @@
+import 'dart:convert';
 import 'package:dio/dio.dart';
-import 'package:meilisearch/src/query_parameters/documents_query.dart';
-import 'package:meilisearch/src/query_parameters/tasks_query.dart';
-import 'package:meilisearch/src/result.dart';
-import 'package:meilisearch/src/searchable.dart';
-import 'package:meilisearch/src/tasks_results.dart';
+import 'result.dart';
+import 'searchable.dart';
+import 'tasks_results.dart';
 import 'package:collection/collection.dart';
 import 'client.dart';
+import 'exception.dart';
 import 'filter_builder/filter_builder_base.dart';
-import 'index.dart';
 import 'http_request.dart';
+import 'index.dart';
 import 'index_settings.dart';
 import 'matching_strategy_enum.dart';
+import 'query_parameters/documents_query.dart';
+import 'query_parameters/tasks_query.dart';
 import 'stats.dart' show IndexStats;
 import 'task.dart';
 
+const _ndjsonContentType = 'application/x-ndjson';
+const _csvContentType = 'text/csv';
+
 class MeiliSearchIndexImpl implements MeiliSearchIndex {
   MeiliSearchIndexImpl(
     this.client,
@@ -73,12 +78,12 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
       if (primaryKey != null) 'primaryKey': primaryKey,
     };
 
-    return await _update(http.patchMethod('/indexes/$uid', data: data));
+    return await _getTask(http.patchMethod('/indexes/$uid', data: data));
   }
 
   @override
   Future<Task> delete() async {
-    return await _update(http.deleteMethod('/indexes/$uid'));
+    return await _getTask(http.deleteMethod('/indexes/$uid'));
   }
 
   @override
@@ -151,7 +156,7 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
   // Document endpoints
   //
 
-  Future<Task> _update(Future<Response<Map<String, dynamic>>> future) async {
+  Future<Task> _getTask(Future<Response<Map<String, dynamic>>> future) async {
     final response = await future;
     return Task.fromMap(response.data!);
   }
@@ -160,8 +165,8 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
   Future<Task> addDocuments(
     documents, {
     String? primaryKey,
-  }) async {
-    return await _update(http.postMethod(
+  }) {
+    return _getTask(http.postMethod(
       '/indexes/$uid/documents',
       data: documents,
       queryParameters: {
@@ -170,33 +175,220 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
     ));
   }
 
+  @override
+  Future<Task> addDocumentsJson(String documents, {String? primaryKey}) {
+    final decoded = jsonDecode(documents);
+
+    if (decoded is List) {
+      final casted = decoded.whereType<Map<String, dynamic>>().toList();
+
+      return addDocuments(casted, primaryKey: primaryKey);
+    }
+
+    throw MeiliSearchApiException(
+      "Provided JSON must be an array of documents; consider using addDocumentsNdjson if this isn't the case",
+    );
+  }
+
+  @override
+  Future<Task> addDocumentsCsv(
+    String documents, {
+    String? primaryKey,
+  }) {
+    return _getTask(http.postMethod(
+      '/indexes/$uid/documents',
+      data: documents,
+      queryParameters: {
+        if (primaryKey != null) 'primaryKey': primaryKey,
+      },
+      contentType: _csvContentType,
+    ));
+  }
+
+  @override
+  Future<Task> addDocumentsNdjson(String documents, {String? primaryKey}) {
+    return _getTask(http.postMethod(
+      '/indexes/$uid/documents',
+      data: documents,
+      queryParameters: {
+        if (primaryKey != null) 'primaryKey': primaryKey,
+      },
+      contentType: _ndjsonContentType,
+    ));
+  }
+
+  @override
+  Future<List<Task>> addDocumentsInBatches(
+    List<Map<String, dynamic>> documents, {
+    int batchSize = 1000,
+    String? primaryKey,
+  }) =>
+      Future.wait(
+        documents
+            .slices(batchSize)
+            .map((slice) => addDocuments(slice, primaryKey: primaryKey)),
+      );
+
+  @override
+  Future<List<Task>> addDocumentsCsvInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  }) {
+    final ls = LineSplitter();
+    final split = ls.convert(documents);
+    // the header line is shared across all slices
+    final header = split.first;
+    return Future.wait(
+      split.skip(1).slices(batchSize).map(
+            (slice) => addDocumentsCsv(
+              [header, ...slice].join('\n'),
+              primaryKey: primaryKey,
+            ),
+          ),
+    );
+  }
+
+  @override
+  Future<List<Task>> addDocumentsNdjsonInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  }) {
+    final ls = LineSplitter();
+    final split = ls.convert(documents);
+
+    return Future.wait(
+      split.slices(batchSize).map(
+            (slice) => addDocumentsNdjson(
+              slice.join('\n'),
+              primaryKey: primaryKey,
+            ),
+          ),
+    );
+  }
+
   @override
   Future<Task> updateDocuments(
     documents, {
     String? primaryKey,
   }) async {
-    return await _update(http.putMethod(
+    return await _getTask(http.putMethod(
+      '/indexes/$uid/documents',
+      data: documents,
+      queryParameters: {
+        if (primaryKey != null) 'primaryKey': primaryKey,
+      },
+    ));
+  }
+
+  @override
+  Future<Task> updateDocumentsJson(
+    String documents, {
+    String? primaryKey,
+  }) {
+    final decoded = jsonDecode(documents);
+
+    if (decoded is List) {
+      final casted = decoded.whereType<Map<String, dynamic>>().toList();
+
+      return updateDocuments(casted, primaryKey: primaryKey);
+    }
+
+    throw MeiliSearchApiException(
+      "Provided JSON must be an array of documents; consider using updateDocumentsNdjson if this isn't the case",
+    );
+  }
+
+  @override
+  Future<Task> updateDocumentsCsv(String documents, {String? primaryKey}) {
+    return _getTask(http.putMethod(
+      '/indexes/$uid/documents',
+      data: documents,
+      queryParameters: {
+        if (primaryKey != null) 'primaryKey': primaryKey,
+      },
+      contentType: _csvContentType,
+    ));
+  }
+
+  @override
+  Future<Task> updateDocumentsNdjson(String documents, {String? primaryKey}) {
+    return _getTask(http.putMethod(
       '/indexes/$uid/documents',
       data: documents,
       queryParameters: {
         if (primaryKey != null) 'primaryKey': primaryKey,
       },
+      contentType: _ndjsonContentType,
     ));
   }
 
+  @override
+  Future<List<Task>> updateDocumentsInBatches(
+    List<Map<String, dynamic>> documents, {
+    int batchSize = 1000,
+    String? primaryKey,
+  }) =>
+      Future.wait(
+        documents
+            .slices(batchSize)
+            .map((slice) => updateDocuments(slice, primaryKey: primaryKey)),
+      );
+
+  @override
+  Future<List<Task>> updateDocumentsCsvInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  }) {
+    final ls = LineSplitter();
+    final split = ls.convert(documents);
+    // the header line is shared across all slices
+    final header = split.first;
+
+    return Future.wait(
+      split.skip(1).slices(batchSize).map(
+            (slice) => updateDocumentsCsv(
+              [header, ...slice].join('\n'),
+              primaryKey: primaryKey,
+            ),
+          ),
+    );
+  }
+
+  @override
+  Future<List<Task>> updateDocumentsNdjsonInBatches(
+    String documents, {
+    String? primaryKey,
+    int batchSize = 1000,
+  }) {
+    final ls = LineSplitter();
+    final split = ls.convert(documents);
+
+    return Future.wait(
+      split.slices(batchSize).map(
+            (slice) => updateDocumentsNdjson(
+              slice.join('\n'),
+              primaryKey: primaryKey,
+            ),
+          ),
+    );
+  }
+
   @override
   Future<Task> deleteAllDocuments() async {
-    return await _update(http.deleteMethod('/indexes/$uid/documents'));
+    return await _getTask(http.deleteMethod('/indexes/$uid/documents'));
   }
 
   @override
   Future<Task> deleteDocument(Object? id) async {
-    return await _update(http.deleteMethod('/indexes/$uid/documents/$id'));
+    return await _getTask(http.deleteMethod('/indexes/$uid/documents/$id'));
   }
 
   @override
   Future<Task> deleteDocuments(List ids) async {
-    return await _update(
+    return await _getTask(
       http.postMethod(
         '/indexes/$uid/documents/delete-batch',
         data: ids,
@@ -239,12 +431,12 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetSettings() async {
-    return await _update(http.deleteMethod('/indexes/$uid/settings'));
+    return await _getTask(http.deleteMethod('/indexes/$uid/settings'));
   }
 
   @override
   Future<Task> updateSettings(IndexSettings settings) async {
-    return await _update(http.patchMethod(
+    return await _getTask(http.patchMethod(
       '/indexes/$uid/settings',
       data: settings.toMap(),
     ));
@@ -260,14 +452,14 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetFilterableAttributes() async {
-    return await _update(
+    return await _getTask(
         http.deleteMethod('/indexes/$uid/settings/filterable-attributes'));
   }
 
   @override
   Future<Task> updateFilterableAttributes(
       List<String> filterableAttributes) async {
-    return await _update(http.putMethod(
+    return await _getTask(http.putMethod(
         '/indexes/$uid/settings/filterable-attributes',
         data: filterableAttributes));
   }
@@ -282,14 +474,14 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetDisplayedAttributes() async {
-    return await _update(
+    return await _getTask(
        http.deleteMethod('/indexes/$uid/settings/displayed-attributes'));
   }
 
   @override
   Future<Task> updateDisplayedAttributes(
       List<String> displayedAttributes) async {
-    return await _update(http.putMethod(
+    return await _getTask(http.putMethod(
         '/indexes/$uid/settings/displayed-attributes',
         data: displayedAttributes));
   }
@@ -304,13 +496,13 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetDistinctAttribute() async {
-    return await _update(
+    return await _getTask(
         http.deleteMethod('/indexes/$uid/settings/distinct-attribute'));
   }
 
   @override
   Future<Task> updateDistinctAttribute(String distinctAttribute) async {
-    return await _update(http.putMethod(
+    return await _getTask(http.putMethod(
         '/indexes/$uid/settings/distinct-attribute',
         data: '"$distinctAttribute"'));
   }
@@ -325,13 +517,13 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetRankingRules() async {
-    return await _update(
+    return await _getTask(
         http.deleteMethod('/indexes/$uid/settings/ranking-rules'));
   }
 
   @override
   Future<Task> updateRankingRules(List<String> rankingRules) async {
-    return await _update(http.putMethod('/indexes/$uid/settings/ranking-rules',
+    return await _getTask(http.putMethod('/indexes/$uid/settings/ranking-rules',
         data: rankingRules));
   }
@@ -345,7 +537,7 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetStopWords() async {
-    return await _update(
+    return await _getTask(
         http.deleteMethod('/indexes/$uid/settings/stop-words'));
   }
 
@@ -359,21 +551,21 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetSearchableAttributes() async {
-    return await _update(
+    return await _getTask(
         http.deleteMethod('/indexes/$uid/settings/searchable-attributes'));
   }
 
   @override
   Future<Task> updateSearchableAttributes(
       List<String> searchableAttributes) async {
-    return await _update(http.putMethod(
+    return await _getTask(http.putMethod(
         '/indexes/$uid/settings/searchable-attributes',
         data: searchableAttributes));
   }
 
   @override
   Future<Task> updateStopWords(List<String> stopWords) async {
-    return await _update(
+    return await _getTask(
         http.putMethod('/indexes/$uid/settings/stop-words', data: stopWords));
   }
 
@@ -388,12 +580,12 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetSynonyms() async {
-    return await _update(http.deleteMethod('/indexes/$uid/settings/synonyms'));
+    return await _getTask(http.deleteMethod('/indexes/$uid/settings/synonyms'));
   }
 
   @override
   Future<Task> updateSynonyms(Map<String, List<String>> synonyms) async {
-    return await _update(
+    return await _getTask(
         http.putMethod('/indexes/$uid/settings/synonyms', data: synonyms));
   }
 
@@ -407,13 +599,13 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
 
   @override
   Future<Task> resetSortableAttributes() async {
-    return await _update(
+    return await _getTask(
         http.deleteMethod('/indexes/$uid/settings/sortable-attributes'));
   }
 
   @override
   Future<Task> updateSortableAttributes(List<String> sortableAttributes) async {
-    return _update(http.putMethod('/indexes/$uid/settings/sortable-attributes',
+    return _getTask(http.putMethod('/indexes/$uid/settings/sortable-attributes',
         data: sortableAttributes));
   }
 
@@ -448,28 +640,4 @@ class MeiliSearchIndexImpl implements MeiliSearchIndex {
   Future<Task> getTask(int uid) async {
     return await client.getTask(uid);
   }
-
-  @override
-  Future<List<Task>> addDocumentsInBatches(
-    List<Map<String, dynamic>> documents, {
-    int batchSize = 1000,
-    String? primaryKey,
-  }) =>
-      Future.wait(
-        documents
-            .slices(batchSize)
-            .map((slice) => addDocuments(slice, primaryKey: primaryKey)),
-      );
-
-  @override
-  Future<List<Task>> updateDocumentsInBatches(
-    List<Map<String, dynamic>> documents, {
-    int batchSize = 1000,
-    String? primaryKey,
-  }) =>
-      Future.wait(
-        documents
-            .slices(batchSize)
-            .map((slice) => updateDocuments(slice, primaryKey: primaryKey)),
-      );
 }
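The `*CsvInBatches` implementations above hinge on one detail: the header line is kept out of the slices and re-attached to every batch. A self-contained sketch of that splitting step (the helper name is ours, not the library's; `slices` comes from `package:collection`, which the diff itself uses):

```dart
import 'dart:convert';

import 'package:collection/collection.dart';

List<String> csvBatches(String csv, int batchSize) {
  final lines = const LineSplitter().convert(csv);
  final header = lines.first; // shared by every batch
  return lines
      .skip(1)
      .slices(batchSize)
      .map((slice) => [header, ...slice].join('\n'))
      .toList();
}

void main() {
  final batches = csvBatches('id:number,title\n1,A\n2,B\n3,C', 2);
  print(batches.length); // 2 — and each batch begins with the header line
}
```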
diff --git a/test/documents_test.dart b/test/documents_test.dart
index d9d9644a..f12cc531 100644
--- a/test/documents_test.dart
+++ b/test/documents_test.dart
@@ -1,3 +1,6 @@
+import 'dart:convert';
+
+import 'package:collection/collection.dart';
 import 'package:meilisearch/meilisearch.dart';
 import 'package:test/test.dart';
 import 'utils/books_data.dart';
@@ -8,129 +11,411 @@
 import 'utils/books.dart';
 
 void main() {
   group('Documents', () {
     setUpClient();
+    Future<void> testUpdatedDataGeneral({
+      required MeiliSearchIndex index,
+      required Set<int> totalIds,
+      required List<Map<String, Object?>> originalData,
+      required List<Map<String, Object?>> updateProposals,
+    }) async {
+      final docs = await index.getDocuments(
+        params: DocumentsQuery(limit: totalIds.length + 10),
+      );
+      expect(docs.total, equals(totalIds.length));
+      int updated = 0;
+      for (var element in docs.results) {
+        final matchingOriginal = originalData.firstWhereOrNull(
+          (original) => original[kbookId] == element[kbookId],
+        );
+        final matchingProposal = updateProposals.firstWhereOrNull(
+          (newProposal) => newProposal[kbookId] == element[kbookId],
+        );
+        expect(
+          matchingOriginal != null || matchingProposal != null,
+          equals(true),
+        );
+        // check that the new title matches the update proposal
+        if (matchingProposal != null) {
+          expect(element[ktitle], equals(matchingProposal[ktitle]));
+          updated++;
+        }
+        // check that the tag still matches the original document
+        if (matchingOriginal != null) {
+          expect(element[ktag], equals(matchingOriginal[ktag]));
+        }
+      }
+      expect(updated, equals(updateProposals.length));
+    }
 
-    test('Add documents', () async {
-      final index = client.index(randomUid());
-      await index.addDocuments(books).waitFor(client: client);
-      final docs = await index.getDocuments();
-      expect(docs.total, books.length);
+    late MeiliSearchIndex index;
+    setUp(() {
+      index = client.index(randomUid());
     });
 
-    test('Add documents in batches', () async {
-      final index = client.index(randomUid());
-      const batchSize = 10;
-      const totalCount = (batchSize * 4) + 1;
-      const chunks = 5;
+    group("Normal", () {
+      final data = books;
+      final totalCount = data.length;
 
-      final tasks = await index.addDocumentsInBatches(
-        dynamicBooks(totalCount),
-        batchSize: batchSize,
-      );
+      group("Add", () {
+        Future<void> testAddedData() async {
+          final docs = await index.getDocuments();
+          expect(docs.total, totalCount);
+          const itemEq = MapEquality();
+          final listEq = UnorderedIterableEquality(itemEq);
+          expect(listEq.equals(docs.results, data), equals(true));
+        }
 
-      expect(tasks.length, chunks);
-      await tasks.waitFor(client: client, timeout: Duration(seconds: 30));
-      final docs = await index.getDocuments();
-      expect(docs.total, totalCount);
-    });
+        test('JSON raw', () async {
+          await index
+              .addDocumentsJson(jsonEncode(data))
+              .waitFor(client: client);
 
-    test('Add documents with primary key', () async {
-      final index = client.index(randomUid());
-      await index
-          .addDocuments(books, primaryKey: 'book_id')
-          .waitFor(client: client);
-      final docs = await index.getDocuments();
-      expect(docs.total, books.length);
-    });
+          await testAddedData();
+        });
+        test('JSON raw with primary key', () async {
+          await index
+              .addDocumentsJson(jsonEncode(data), primaryKey: kbookId)
+              .waitFor(client: client);
 
-    test('Update documents', () async {
-      final index = await createBooksIndex();
-      await index.updateDocuments([
-        {'book_id': 1344, 'title': 'The Hobbit 2'},
-      ]).waitFor(client: client);
-      final doc = await index.getDocument(1344);
-      expect(doc, isNotNull);
-      expect(doc?['book_id'], equals(1344));
-      expect(doc?['title'], equals('The Hobbit 2'));
-    });
+          await testAddedData();
+        });
+        test('JSON parsed', () async {
+          await index.addDocuments(data).waitFor(client: client);
 
-    test('Update documents in batches', () async {
-      const batchSize = 10;
-      const chunks = 3;
-      const totalCount = (batchSize * 2) + 1;
-      final index = await createDynamicBooksIndex(count: totalCount);
-
-      final tasks = await index.updateDocumentsInBatches(
-        List.generate(
-          totalCount,
-          (index) => {
-            'book_id': index,
-            'title': 'Updated Book $index',
-          },
-        ),
-        batchSize: batchSize,
-      );
+          await testAddedData();
+        });
+        test('JSON parsed with primary key', () async {
+          await index
+              .addDocuments(books, primaryKey: kbookId)
+              .waitFor(client: client);
+
+          await testAddedData();
+        });
 
-      expect(tasks.length, chunks);
-      await tasks.waitFor(client: client, timeout: Duration(seconds: 30));
-      final docs = await index.getDocuments();
-      expect(docs.total, totalCount);
-      docs.results.map((element) {
-        final bookId = element['book_id'];
-        expect(element['title'], equals('Updated Book $bookId'));
+        test('CSV', () async {
+          final csvData = dataAsCSV(data);
+          await index.addDocumentsCsv(csvData).waitFor(client: client);
+
+          await testAddedData();
+        });
+        test('CSV with primary key', () async {
+          await index
+              .addDocumentsCsv(dataAsCSV(data), primaryKey: kbookId)
+              .waitFor(client: client);
+
+          await testAddedData();
+        });
+
+        test('NDJson', () async {
+          await index
+              .addDocumentsNdjson(dataAsNDJson(data))
+              .waitFor(client: client);
+
+          await testAddedData();
+        });
+        test('NDJson with primary key', () async {
+          await index
+              .addDocumentsNdjson(dataAsNDJson(data), primaryKey: kbookId)
+              .waitFor(client: client);
+
+          await testAddedData();
+        });
       });
-    });
 
-    test('Update documents and pass a primary key', () async {
-      final uid = randomUid();
-      var index = client.index(uid);
-      await index.updateDocuments([
-        {'the_book_id': 1344, 'title': 'The Hobbit 2'},
-      ], primaryKey: 'the_book_id').waitFor(client: client);
-      index = await client.getIndex(uid);
-      expect(index.primaryKey, 'the_book_id');
-      final doc = await index.getDocument(1344);
-      expect(doc, isNotNull);
-      expect(doc?['the_book_id'], equals(1344));
-      expect(doc?['title'], equals('The Hobbit 2'));
-    });
+      group('Update', () {
+        final originalData = books;
+        final updateData = partialBookUpdate;
+        final totalIds = originalData
+            .map((e) => e[kbookId])
+            .followedBy(updateData.map((e) => e[kbookId]))
+            .whereType<int>()
+            .toSet();
 
-    test('Delete one document', () async {
-      final index = await createBooksIndex();
-      await index.deleteDocument(456).waitFor(client: client);
-      expect(index.getDocument(456), throwsA(isA<MeiliSearchApiException>()));
-    });
+        Future<void> testUpdatedData() => testUpdatedDataGeneral(
+              index: index,
+              totalIds: totalIds,
+              originalData: originalData,
+              updateProposals: updateData,
+            );
 
-    test('Delete multiple documents', () async {
-      final index = await createBooksIndex();
-      await index.deleteDocuments([456, 4]).waitFor(client: client);
-      expect(index.getDocument(4), throwsA(isA<MeiliSearchApiException>()));
-      expect(index.getDocument(456), throwsA(isA<MeiliSearchApiException>()));
-    });
+        setUp(() async {
+          // seed the data
+          index = await createBooksIndex(uid: index.uid);
+        });
 
-    test('Delete all documents', () async {
-      final index = await createBooksIndex();
-      await index.deleteAllDocuments().waitFor(client: client);
-      final docs = await index.getDocuments();
-      expect(docs.total, 0);
-    });
+        test('JSON raw', () async {
+          await index
+              .updateDocumentsJson(jsonEncode(updateData))
+              .waitFor(client: client);
 
-    test('Get documents with query params', () async {
-      final index = await createBooksIndex();
-      final docs = await index.getDocuments(
-          params: DocumentsQuery(offset: 1, fields: ['book_id']));
-      expect(docs.total, equals(books.length));
-      expect(docs.offset, equals(1));
-      expect(docs.limit, greaterThan(0));
-      expect(docs.results[0]['book_id'], isNotNull);
-      expect(docs.results[0]['title'], isNull);
+          await testUpdatedData();
+        });
+        test('JSON raw with primary key', () async {
+          await index
+              .updateDocumentsJson(
+                jsonEncode(updateData),
+                primaryKey: kbookId,
+              )
+              .waitFor(client: client);
+
+          await testUpdatedData();
+        });
+
+        test('JSON parsed', () async {
+          await index.updateDocuments(updateData).waitFor(client: client);
+
+          await testUpdatedData();
+        });
+        test('JSON parsed with primary key', () async {
+          await index
+              .updateDocuments(
+                updateData,
+                primaryKey: kbookId,
+              )
+              .waitFor(client: client);
+
+          await testUpdatedData();
+        });
+
+        test('CSV', () async {
+          await index
+              .updateDocumentsCsv(dataAsCSV(updateData))
+              .waitFor(client: client);
+
+          await testUpdatedData();
+        });
+        test('CSV with primary key', () async {
+          await index
+              .updateDocumentsCsv(
+                dataAsCSV(updateData),
+                primaryKey: kbookId,
+              )
+              .waitFor(client: client);
+
+          await testUpdatedData();
+        });
+
+        test('NDJson', () async {
+          await index
+              .updateDocumentsNdjson(dataAsNDJson(updateData))
+              .waitFor(client: client);
+
+          await testUpdatedData();
+        });
+        test('NDJson with primary key', () async {
+          await index
+              .updateDocumentsNdjson(
+                dataAsNDJson(updateData),
+                primaryKey: kbookId,
+              )
+              .waitFor(client: client);
+
+          await testUpdatedData();
+        });
+      });
+
+      group("Delete", () {
+        setUp(() async {
+          // seed the index
+          index = await createBooksIndex(uid: index.uid);
+        });
+        test('one document', () async {
+          await index.deleteDocument(456).waitFor(client: client);
+
+          expect(
+              index.getDocument(456), throwsA(isA<MeiliSearchApiException>()));
+        });
+
+        test('multiple documents', () async {
+          await index.deleteDocuments([456, 4]).waitFor(client: client);
+
+          expect(index.getDocument(4), throwsA(isA<MeiliSearchApiException>()));
+          expect(
+              index.getDocument(456), throwsA(isA<MeiliSearchApiException>()));
+        });
+
+        test('all documents', () async {
+          await index.deleteAllDocuments().waitFor(client: client);
+
+          final docs = await index.getDocuments();
+          expect(docs.total, 0);
+        });
+      });
+      group("Get", () {
+        setUp(() async {
+          index = await createBooksIndex();
+        });
+        test('documents with query params', () async {
+          final docs = await index.getDocuments(
+            params: DocumentsQuery(offset: 1, fields: [kbookId]),
+          );
+
+          expect(docs.total, equals(books.length));
+          expect(docs.offset, equals(1));
+          expect(docs.limit, greaterThan(0));
+          expect(docs.results[0][kbookId], isNotNull);
+          expect(docs.results[0][ktitle], isNull);
+        });
+
+        test('document with fields', () async {
+          final doc = await index.getDocument(1, fields: [kbookId]);
+
+          expect(doc?[kbookId], isNotNull);
+          expect(doc?[ktitle], isNull);
+        });
+      });
     });
 
+    group("Batched", () {
+      group("Add", () {
+        const batchSize = 10;
+        const totalCount = (batchSize * 4) + 1;
+        const chunks = 5;
+        final List<Map<String, Object?>> data = dynamicBooks(totalCount);
+
+        Future<void> testAddedBatches(List<Task> tasks) async {
+          expect(tasks.length, chunks);
+          await tasks.waitFor(client: client, timeout: Duration(seconds: 30));
+          final docs = await index.getDocuments();
+          expect(docs.total, totalCount);
+        }
+
+        test('JSON parsed', () async {
+          final tasks = await index.addDocumentsInBatches(
+            data,
+            batchSize: batchSize,
+          );
+
+          await testAddedBatches(tasks);
+        });
+        test('JSON parsed with primary key', () async {
+          final tasks = await index.addDocumentsInBatches(
+            data,
+            batchSize: batchSize,
+            primaryKey: kbookId,
+          );
+
+          await testAddedBatches(tasks);
+        });
+
+        test('CSV', () async {
+          final tasks = await index.addDocumentsCsvInBatches(
+            dataAsCSV(data),
+            batchSize: batchSize,
+          );
+
+          await testAddedBatches(tasks);
+        });
+        test('CSV with primary key', () async {
+          final tasks = await index.addDocumentsCsvInBatches(
+            dataAsCSV(data),
+            batchSize: batchSize,
+            primaryKey: kbookId,
+          );
+
+          await testAddedBatches(tasks);
+        });
 
-    test('Get document with fields', () async {
-      final index = await createBooksIndex();
-      final doc = await index.getDocument(1, fields: ['book_id']);
+        test('NDJSON', () async {
+          final tasks = await index.addDocumentsNdjsonInBatches(
+            dataAsNDJson(data),
+            batchSize: batchSize,
+          );
 
-      expect(doc?['book_id'], isNotNull);
-      expect(doc?['title'], isNull);
+          await testAddedBatches(tasks);
+        });
+        test('NDJSON with primary key', () async {
+          final tasks = await index.addDocumentsNdjsonInBatches(
+            dataAsNDJson(data),
+            batchSize: batchSize,
+            primaryKey: kbookId,
+          );
+
+          await testAddedBatches(tasks);
+        });
+      });
+
+      group("Update", () {
+        const batchSize = 10;
+        const totalCount = (batchSize * 4) + 1;
+        const chunks = 5;
+        final originalData = dynamicBooks(totalCount);
+        final updateData = dynamicPartialBookUpdate(totalCount);
+
+        final totalIds = originalData
+            .map((e) => e[kbookId])
+            .followedBy(updateData.map((e) => e[kbookId]))
+            .whereType<int>()
+            .toSet();
+
+        Future<void> testUpdatedBatches(List<Task> tasks) async {
+          expect(tasks.length, chunks);
+          await tasks.waitFor(client: client, timeout: Duration(seconds: 30));
+          await testUpdatedDataGeneral(
+            index: index,
+            totalIds: totalIds,
+            originalData: originalData,
+            updateProposals: updateData,
+          );
+        }
+
+        setUp(() async {
+          index =
+              await createDynamicBooksIndex(uid: index.uid, count: totalCount);
+        });
+
+        test('JSON parsed', () async {
+          final tasks = await index.updateDocumentsInBatches(
+            updateData,
+            batchSize: batchSize,
+          );
+
+          await testUpdatedBatches(tasks);
+        });
+        test('JSON parsed with primary key', () async {
+          final tasks = await index.updateDocumentsInBatches(
+            updateData,
+            batchSize: batchSize,
+            primaryKey: kbookId,
+          );
+
+          await testUpdatedBatches(tasks);
+        });
+
+        test('CSV', () async {
+          final tasks = await index.updateDocumentsCsvInBatches(
+            dataAsCSV(updateData),
+            batchSize: batchSize,
+          );
+
+          await testUpdatedBatches(tasks);
+        });
+        test('CSV with primary key', () async {
+          final tasks = await index.updateDocumentsCsvInBatches(
+            dataAsCSV(updateData),
+            batchSize: batchSize,
+            primaryKey: kbookId,
+          );
+
+          await testUpdatedBatches(tasks);
+        });
+
+        test('NDJSON', () async {
+          final tasks = await index.updateDocumentsNdjsonInBatches(
+            dataAsNDJson(updateData),
+            batchSize: batchSize,
+          );
+
+          await testUpdatedBatches(tasks);
+        });
+        test('NDJSON with primary key', () async {
+          final tasks = await index.updateDocumentsNdjsonInBatches(
+            dataAsNDJson(updateData),
+            batchSize: batchSize,
+            primaryKey: kbookId,
+          );
+
+          await testUpdatedBatches(tasks);
+        });
+      });
+    });
   });
 }
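The `testAddedData` helper above compares document lists order-insensitively. A minimal illustration of that equality setup from `package:collection` (standalone, not part of the test suite; the explicit type arguments are our choice):

```dart
import 'package:collection/collection.dart';

void main() {
  const itemEq = MapEquality<String, Object?>();
  const listEq = UnorderedIterableEquality(itemEq);

  final a = [{'id': 1}, {'id': 2}];
  final b = [{'id': 2}, {'id': 1}]; // same documents, different order
  print(listEq.equals(a, b)); // true
}
```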
diff --git a/test/utils/books_data.dart b/test/utils/books_data.dart
index d58c0294..b486917a 100644
--- a/test/utils/books_data.dart
+++ b/test/utils/books_data.dart
@@ -1,61 +1,145 @@
+import 'dart:convert';
+
+const kbookId = 'book_id';
+const ktitle = 'title';
+const ktag = 'tag';
+const kid = 'id';
+
 List<Map<String, Object?>> dynamicBooks(int count) {
   final tags = List.generate(4, (index) => "Tag $index");
   return List.generate(
     count,
     (index) => {
-      'book_id': index,
-      'title': 'Book $index',
-      'tag': tags[index % tags.length],
+      kbookId: index,
+      ktitle: 'Book $index',
+      ktag: tags[index % tags.length],
     },
   );
 }
 
+List<Map<String, Object?>> dynamicPartialBookUpdate(int count) {
+  return List.generate(
+    count,
+    (index) {
+      // shift the index by 5 to simulate updates to 5 non-existent books
+      index += 5;
+      return {
+        kbookId: index,
+        ktitle: 'UPDATED Book $index',
+      };
+    },
+  );
+}
+
+final partialBookUpdate = [
+  {kbookId: 123, ktitle: 'UPDATED Pride and Prejudice'},
+  {kbookId: 1344, ktitle: 'UPDATED The Hobbit'},
+  // a new book, which should be upserted
+  {kbookId: 654, ktitle: 'UPDATED Not Le Petit Prince'},
+];
+
 final books = [
-  {'book_id': 123, 'title': 'Pride and Prejudice', 'tag': 'Romance'},
-  {'book_id': 456, 'title': 'Le Petit Prince', 'tag': 'Tale'},
-  {'book_id': 1, 'title': 'Alice In Wonderland', 'tag': 'Tale'},
-  {'book_id': 1344, 'title': 'The Hobbit', 'tag': 'Epic fantasy'},
+  {kbookId: 123, ktitle: 'Pride and Prejudice', ktag: 'Romance'},
+  {kbookId: 456, ktitle: 'Le Petit Prince', ktag: 'Tale'},
+  {kbookId: 1, ktitle: 'Alice In Wonderland', ktag: 'Tale'},
+  {kbookId: 1344, ktitle: 'The Hobbit', ktag: 'Epic fantasy'},
   {
-    'book_id': 4,
-    'title': 'Harry Potter and the Half-Blood Prince',
-    'tag': 'Epic fantasy'
+    kbookId: 4,
+    ktitle: 'Harry Potter and the Half-Blood Prince',
+    ktag: 'Epic fantasy'
   },
   {
-    'book_id': 42,
-    'title': 'The Hitchhiker\'s Guide to the Galaxy',
-    'tag': 'Epic fantasy'
+    kbookId: 42,
+    ktitle: 'The Hitchhiker\'s Guide to the Galaxy',
+    ktag: 'Epic fantasy'
   },
-  {'book_id': 9999, 'title': 'The Hobbit', 'tag': null},
+  {kbookId: 9999, ktitle: 'The Hobbit', ktag: null},
 ];
 
+enum CSVHeaderTypes {
+  string,
+  /// TODO(ahmednfwela): blocked until https://github.com/meilisearch/meilisearch/pull/3576
+  boolean,
+  number,
+  unknown,
+}
+
+String dataAsCSV(List<Map<String, Object?>> data) {
+  final csvHeaders = <String, CSVHeaderTypes>{};
+  final csvDataBuffer = StringBuffer();
+  for (final element in data) {
+    for (final entry in element.entries) {
+      if (!csvHeaders.containsKey(entry.key)) {
+        final value = entry.value;
+        if (value != null) {
+          csvHeaders[entry.key] = value is String
+              ? CSVHeaderTypes.string
+              : value is num
+                  ? CSVHeaderTypes.number
+                  : value is bool
+                      ? CSVHeaderTypes.boolean
+                      : CSVHeaderTypes.unknown;
+        }
+      }
+    }
+  }
+  final csvHeaderEntries = csvHeaders.entries.toList();
+
+  data
+      .map(
+        (obj) => csvHeaderEntries
+            .map((e) => e.key)
+            .map((headerKey) => json.encode(obj[headerKey] ?? ""))
+            .join(','),
+      )
+      .forEach(csvDataBuffer.writeln);
+
+  final headerStr = csvHeaders.entries.map((header) {
+    final headerType = header.value;
+    final typeStr = headerType == CSVHeaderTypes.number
+        ? ':number'
+        : headerType == CSVHeaderTypes.boolean
+            ? ':boolean'
+            : null;
+    return jsonEncode('${header.key}${typeStr ?? ""}');
+  }).join(",");
+
+  return '$headerStr\n${csvDataBuffer.toString()}';
+}
+
+String dataAsNDJson(List<Map<String, Object?>> data) {
+  return data.map(jsonEncode).join("\n");
+}
+
 final nestedBooks = [
   {
-    "id": 1,
-    "title": 'Pride and Prejudice',
+    kid: 1,
+    ktitle: 'Pride and Prejudice',
     "info": {
       "comment": 'A great book',
       "reviewNb": 500,
     },
   },
   {
-    "id": 2,
-    "title": 'Le Petit Prince',
+    kid: 2,
+    ktitle: 'Le Petit Prince',
     "info": {
       "comment": 'A french book',
       "reviewNb": 600,
     },
   },
   {
-    "id": 3,
-    "title": 'Le Rouge et le Noir',
+    kid: 3,
+    ktitle: 'Le Rouge et le Noir',
     "info": {
       "comment": 'Another french book',
       "reviewNb": 700,
     },
   },
   {
-    "id": 4,
-    "title": 'Alice In Wonderland',
+    kid: 4,
+    ktitle: 'Alice In Wonderland',
     "comment": 'A weird book',
     "info": {
       "comment": 'A weird book',
@@ -63,20 +147,20 @@ final nestedBooks = [
     },
   },
   {
-    "id": 5,
-    "title": 'The Hobbit',
+    kid: 5,
+    ktitle: 'The Hobbit',
     "info": {
       "comment": 'An awesome book',
       "reviewNb": 900,
     },
   },
   {
-    "id": 6,
-    "title": 'Harry Potter and the Half-Blood Prince',
+    kid: 6,
+    ktitle: 'Harry Potter and the Half-Blood Prince',
     "info": {
       "comment": 'The best book',
       "reviewNb": 1000,
     },
   },
-  {"id": 7, "title": "The Hitchhiker's Guide to the Galaxy"},
+  {kid: 7, ktitle: "The Hitchhiker's Guide to the Galaxy"},
 ];
diff --git a/test/utils/client.dart b/test/utils/client.dart
index 685bdbb1..5c4db8d5 100644
--- a/test/utils/client.dart
+++ b/test/utils/client.dart
@@ -30,7 +30,7 @@ Future<void> deleteAllKeys() async {
   }
 }
 
-Future<void> setUpClient() async {
+void setUpClient() {
   setUp(() {
     final String server = testServer;
 
@@ -44,7 +44,7 @@
   });
 }
 
-Future<void> setUpHttp() async {
+void setUpHttp() {
   setUp(() {
     final String server = testServer;
 
@@ -52,7 +52,7 @@
   });
 }
 
-Future<void> setUpClientWithWrongUrl() async {
+void setUpClientWithWrongUrl() {
  setUp(() {
     final String server = 'http://wrongurl:1234';
     final connectTimeout = Duration(milliseconds: 1000);
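For reference, a quick check of what `dataAsCSV` above produces for a tiny input — header keys come out JSON-quoted, and numeric columns carry Meilisearch's `:number` type annotation. This sketch assumes it is run as a file under `test/`, next to the utilities it imports:

```dart
import 'utils/books_data.dart';

void main() {
  final csv = dataAsCSV([
    {'book_id': 1, 'title': 'Alice In Wonderland'},
  ]);
  print(csv);
  // "book_id:number","title"
  // 1,"Alice In Wonderland"
}
```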