commit 30b2545f4c7a97a60ffb512c08a4d3bb9ffd5e75 Author: 4831c0 <4831c0@proton.me> Date: Wed Mar 19 19:40:26 2025 +0100 3.1.0+1 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..c88e534 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,431 @@ +## 3.1.0+1 + +### Fixes + +- Fixed error building MacOS library + +## 3.1.0 + +### Breaking + +Sorry for this breaking change. Unfortunately, it was necessary to fix stability issues on Android. + +- `directory` is now required for `Isar.open()` and `Isar.openSync()` + +### Fixes + +- Fixed a crash that occasionally occurred when opening Isar +- Fixed a schema migration issue +- Fixed an issue where embedded class renaming didn't work correctly + +### Enhancements + +- Many internal improvements +- Performance improvements + +## 3.0.6 + +### Fixes + +- Add check to verify transactions are used for correct instance +- Add check to verify that async transactions are still active +- Fix upstream issue with opening databases + +## 3.0.5 + +### Enhancements + +- Improved performance for all operations +- Added `maxSizeMiB` option to `Isar.open()` to specify the maximum size of the database file +- Significantly reduced native library size +- With the help of the community, the docs have been translated into a range of languages +- Improved API docs +- Added integration tests for more platforms to ensure high-quality releases +- Support for unicode paths on Windows + +### Fixes + +- Fixed crash while opening Isar +- Fixed crash on older Android devices +- Fixed a native port that was not closed correctly in some cases +- Added swift version to podspec +- Fixed crash on Windows +- Fixed "IndexNotFound" error + +## 3.0.4 + +REDACTED. + +## 3.0.3 + +REDACTED. 
+ +## 3.0.2 + +### Enhancements + +- The Inspector now supports creating objects and importing JSON +- Added Inspector check to make sure Chrome is used + +### Fixes + +- Added support for the latest analyzer +- Fixed native ports that were not closed correctly in some cases +- Added support for Ubuntu 18.04 and older +- Fixed issue with aborting transactions +- Fixed crash when invalid JSON was provided to `importJsonRaw()` +- Added missing `exportJsonSync()` and `exportJsonRawSync()` +- Fixed issue where secondary instance could not be selected in the Inspector + +## 3.0.1 + +### Enhancements + +- Support for arm64 iOS Simulators + +### Fixes + +- Fixed issue where `.anyOf()`, `.allOf()`, and `.oneOf()` could not be negated +- Fixed too low min-iOS version. The minimum supported is 11.0 +- Fixed error during macOS App Store build + +## 3.0.0 + +This release has been a lot of work! Thanks to everyone who contributed and joined the countless discussions. You are really awesome! + +Special thanks to [@Jtplouffe](https://github.com/Jtplouffe) and [@Peyman](https://github.com/Viper-Bit) for their incredible work. + +### Web support + +This version does not support the web target yet. It will be back in the next version. Please continue using 2.5.0 if you need web support. 
+ +### Enhancements + +- Completely new Isar inspector that does not need to be installed anymore +- Extreme performance improvements for almost all operations (up to 50%) +- Support for embedded objects using `@embedded` +- Support for enums using `@enumerated` +- Vastly improved Isar binary format space efficiency resulting in about 20% smaller databases +- Added `id`, `byte`, `short` and `float` typedefs +- `IsarLinks` now support all `Set` methods based on the Isar `Id` of objects +- Added `download` option to `Isar.initializeIsarCore()` to download binaries automatically +- Added `replace` option for indexes +- Added verification for correct Isar binary version +- Added `collection.getSize()` and `collection.getSizeSync()` +- Added `query.anyOf()` and `query.allOf()` query modifiers +- Support for much more complex composite index queries +- Support for logical XOR and the `.oneOf()` query modifier +- Made providing a path optional +- The default Isar name is now `default` and stored in `dir/name.isar` and `dir/name.isar.lock` +- On non-web platforms, `IsarLink` and `IsarLinks` will load automatically +- `.putSync()`, `.putAllSync()` etc. 
will now save links recursively by default +- Added `isar.getSize()` and `isar.getSizeSync()` +- Added `linksLengthEqualTo()`, `linksIsEmpty()`, `linksIsNotEmpty()`, `linksLengthGreaterThan()`, `linksLengthLessThan()`, `linksLengthBetween()` and `linkIsNull()` filters +- Added `listLengthEqualTo()`, `listIsEmpty()`, `listIsNotEmpty()`, `listLengthGreaterThan()`, `listLengthLessThan()`, `listLengthBetween()` filters +- Added `isNotNull()` filters +- Added `compactOnLaunch` conditions to `Isar.open()` for automatic database compaction +- Added `isar.copyToFile()` which copies a compacted version of the database to a path +- Added check to verify that linked collections schemas are provided for opening an instance +- Apply default values from constructor during deserialization +- Added `isar.verify()` and `col.verify()` methods for checking database integrity in unit tests +- Added missing float and double queries and an `epsilon` parameter + +### Breaking changes + +- Removed `TypeConverter` support in favor of `@embedded` and `@enumerated` +- Removed `@Id()` and `@Size32()` annotations in favor of the `Id` and `short` types +- Changed the `schemas` parameter from named to positional +- The maximum size of objects is now 16MB +- Removed `replaceOnConflict` and `saveLinks` parameter from `collection.put()` and `collection.putAll()` +- Removed `isar` parameter from `Isar.txn()`, `Isar.writeTxn()`, `Isar.txnSync()` and `Isar.writeTxnSync()` +- Removed `query.repeat()` +- Removed `query.sortById()` and `query.distinctById()` +- Fixed `.or()` instead of `.and()` being used implicitly when combining filters +- Renamed multi-entry where clauses from `.yourListAnyEqualTo()` to `.yourListElementEqualTo()` to avoid confusion +- Isar will no longer create the provided directory. Make sure it exists before opening an Isar Instance. 
+- Changed the default index type for all `List`s to `IndexType.hash` +- Renamed `isar.getCollection()` to `isar.collection()` +- It is no longer allowed to extend or implement another collection +- Unsupported properties will no longer be ignored by default +- Renamed the `initialReturn` parameter to `fireImmediately` +- Renamed `Isar.initializeLibraries()` to `Isar.initializeIsarCore()` + +### Fixes + +There are too many fixes to list them all. + +- A lot of link fixes and a slight behavior change to make them super reliable +- Fixed missing symbols on older Android phones +- Fixed composite queries +- Fixed various generator issues +- Fixed error retrieving the id property in a query +- Fixed missing symbols on 32-bit Android 5 & 6 devices +- Fixed inconsistent `null` handling in json export +- Fixed default directory issue on Android +- Fixed different where clauses returning duplicate results +- Fixed hash index issue where multiple list values resulted in the same hash +- Fixed edge case where creating a new index failed + +## 2.5.0 + +### Enhancements + +- Support for Android x86 (32 bit emulator) and macOS arm64 (Apple Silicon) +- Greatly improved test coverage for sync methods +- `col.clear()` now resets the auto increment counter to `0` +- Significantly reduced Isar Core binary size (about 1.4MB -> 800KB) + +### Minor Breaking + +- Changed `initializeLibraries(Map<String, String> libraries)` to `initializeLibraries(Map<IsarAbi, String> libraries)` +- Changed min Dart SDK to `2.16.0` + +### Fixes + +- Fixed issue with `IsarLink.saveSync()` +- Fixed `id` queries +- Fixed error thrown by `BroadcastChannel` in Firefox +- Fixed Isar Inspector connection issue + +## 2.4.0 + +### Enhancements + +- Support for querying links +- Support for filtering and sorting links +- Added methods to update and count links without loading them +- Added `isLoaded` property to links +- Added methods to count the number of objects in a collection +- Big internal improvements + +### Minor Breaking + +- There are 
now different kinds of where clauses for dynamic queries +- `isar.getCollection()` no longer requires the name of the collection +- `Isar.instanceNames` now returns a `Set` instead of a `List` + +### Fixes + +- Fixed iOS crash that frequently happened on older devices +- Fixed 32bit issue on Android +- Fixed link issues +- Fixed missing `BroadcastChannel` API for older Safari versions + +## 2.2.1 + +### Enhancements + +- Reduced Isar web code size by 50% +- Made `directory` parameter of `Isar.open()` optional for web +- Made `name` parameter of `Isar.getInstance()` optional +- Added `Isar.defaultName` constant +- Enabled `TypeConverter`s with supertypes +- Added message if `TypeConverter` nullability doesn't match +- Added more tests + +### Fixes + +- Fixed issue with date queries +- Fixed `FilterGroup.not` constructor (thanks for the PR @jtzell) + +## 2.2.0 + +Isar now has full web support ๐ŸŽ‰. No changes to your code required, just run it. + +_Web passes all unit tests but is still considered beta for now._ + +### Minor Breaking + +- Added `saveLinks` parameter to `.put()` and `.putAll()` which defaults to `false` +- Changed default `overrideChanges` parameter of `links.load()` to `true` to avoid unintended behavior + +### Enhancements + +- Full web support! 
+- Improved write performance +- Added `deleteFromDisk` option to `isar.close()` +- Added `.reset()` and `.resetSync()` methods to `IsarLink` and `IsarLinks` +- Improved `links.save()` performance +- Added many tests + +### Fixed + +- Fixed value of `null` dates to be `DateTime.fromMillisecondsSinceEpoch(0)` +- Fixed problem with migration +- Fixed incorrect list values for new properties (`[]` instead of `null`) +- Improved handling of link edge-cases + +## 2.1.4 + +- Removed `path` dependency +- Fixed incorrect return value of `deleteByIndex()` +- Fixed wrong auto increment ids in some cases (thanks @robban112) +- Fixed an issue with `Isar.close()` (thanks @msxenon) +- Fixed `$` escaping in generated code (thanks @jtzell) +- Fixed broken link in pub.dev example page + +## 2.1.0 + +`isar_connect` is now integrated into `isar` + +### Enhancements + +- Added check for outdated generated files +- Added check for changed schema across isolates +- Added `Isar.openSync()` +- Added `col.importJsonRawSync()`, `col.importJsonSync()`, `query.exportJsonRawSync()`, `query.exportJsonSync()` +- Improved performance for queries +- Improved handling of ffi memory +- More tests + +### Fixed + +- Fixed issue where imported json required existing ids +- Fixed issue with transaction handling (thanks @Peng-Qian for the awesome help) +- Fixed issue with `@Ignore` annotation not always working +- Fixed issue with `getByIndex()` not returning correct object id (thanks @jtzell) + +## 2.0.0 + +### Breaking + +- The id for non-final objects is now assigned automatically after `.put()` and `.putSync()` +- `double` and `List` indexes can no longer be at the beginning of a composite index +- `List` indexes can no longer be hashed +- `.greaterThan()`, `.lessThan()` and `.between()` filters and are now excluding for `double` values (`>=` -> `>`) +- Changed the default index type for lists to `IndexType.value` +- `IsarLink` and `IsarLinks` will no longer be initialized by Isar and must not be 
`nullable` or `late`. +- Dart `2.14` or higher is required + +### Enhancements + +- Added API docs for all public methods +- Added `isar.clear()`, `isar.clearSync()`, `col.clear()` and `col.clearSync()` +- Added `col.filter()` as shortcut for `col.where().filter()` +- Added `include` parameter to `.greaterThan()` and `.lessThan()` filters and where clauses +- Added `includeLower` and `includeUpper` parameters to `.between()` filters and where clauses +- Added `Isar.autoIncrement` to allow non-nullable auto-incrementing ids +- `Isar.close()` now returns whether the last instance was closed +- List values in composite indexes are now of type `IndexType.hash` automatically +- Allowed multiple indexes on the same property +- Removed exported packages from API docs +- Improved generated code +- Improved Isar Core error messages +- Minor performance improvements +- Automatic XCode configuration +- Updated analyzer to `3.0.0` +- More tests + +### Fixed + +- `IsarLink` and `IsarLinks` can now be final +- Fixed multi-entry index queries returning items multiple times in some cases +- Fixed `.anyLessThan()` and `.anyGreaterThan()` issues +- Fixed issues with backlinks +- Fixed issue where query only returned the first `99999` results +- Fixed issue with id where clauses +- Fixed default index type for lists and bytes +- Fixed issue where renaming indexes was not possible +- Fixed issue where wrong index name was used for `.getByX()` and `.deleteByX()` +- Fixed issue where composite indexes did not allow non-hashed Strings as last value +- Fixed issue where `@Ignore()` fields were not ignored + +## 1.0.5 + +### Enhancements + +- Updated dependencies + +### Fixes: + +- Included desktop binaries +- Fixed "Cannot allocate memory" error on older iOS devices +- Fixed stripped binaries for iOS release builds +- Fixed IsarInspector issues (thanks to [RubenBez](https://github.com/RubenBez) and [rizzi37](https://github.com/rizzi37)) + +## 1.0.0+1 + +Added missing binaries + +## 1.0.0 
+ +Switched from liblmdb to libmdbx for better performance, more stability and many internal improvements. + +### Breaking + +The internal database format has been changed to improve performance. Old databases do not work anymore! + +### Fixes + +- Fix issue with links being removed after object update +- Fix String index problems + +### Enhancements + +- Support `greaterThan`, `lessThan` and `between` queries for String values +- Support for inheritance (enabled by default) +- Support for `final` properties and getters +- Support for `freezed` and other code generators +- Support getting / deleting objects by a key `col.deleteByName('Anne')` +- Support for list indexes (hash an element based) +- Generator now creates individual files instead of one big file +- Allow specifying the collection accessor name +- Unsupported properties are now ignored automatically +- Returns the assigned ids after `.put()` operations (objects are no longer mutated) +- Introduces `replaceOnConflict` option for `.put()` (instead of specifying it for index) +- many more... 
+ +### Internal + +- Improve generated code +- Many new unit tests + +## 0.4.0 + +### Breaking + +- Remove `.where...In()` and `...In()` extension methods +- Split `.watch(lazy: bool)` into `.watch()` and `.watchLazy()` +- Remove `include` option for filters + +### Fixes + +- Generate id for JSON imports that don't have an id +- Enable `sortBy` and `thenBy` generation + +### Enhancements + +- Add `.optional()` and `.repeat()` query modifiers +- Support property queries +- Support query aggregation +- Support dynamic queries (for custom query languages) +- Support multi package configuration with `@ExternalCollection()` +- Add `caseSensitive` option to `.distinctBy()` + +### Internal + +- Change iOS linking +- Improve generated code +- Set up integration tests and improve unit tests +- Use CORE/0.4.0 + +## 0.2.0 + +- Link support +- Many improvements and fixes + +## 0.1.0 + +- Support for links and backlinks + +## 0.0.4 + +- Bugfixes and many improvements + +## 0.0.2 + +Fix dependency issue + +## 0.0.1 + +Initial release diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f0d4b83 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2022 Simon Leier + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..2466b44 --- /dev/null +++ b/README.md @@ -0,0 +1,267 @@ +

+ + + +

Isar Database

+

+ +

+ + + + + + + + + + + + + + + +

+ +

Quickstart • + Documentation • + Sample Apps • + Support & Ideas • + Pub.dev

+ +> #### Isar [ee-zahr]: +> +> 1. River in Bavaria, Germany. +> 2. [Crazy fast](#benchmarks) NoSQL database that is a joy to use. + +## Features + +- ๐Ÿ’™ **Made for Flutter**. Easy to use, no config, no boilerplate +- ๐Ÿš€ **Highly scalable** The sky is the limit (pun intended) +- ๐Ÿญ **Feature rich**. Composite & multi-entry indexes, query modifiers, JSON support etc. +- โฑ **Asynchronous**. Parallel query operations & multi-isolate support by default +- ๐Ÿฆ„ **Open source**. Everything is open source and free forever! + +Isar database can do much more (and we are just getting started) + +- ๐Ÿ•ต๏ธ **Full-text search**. Make searching fast and fun +- ๐Ÿ“ฑ **Multiplatform**. iOS, Android, Desktop +- ๐Ÿงช **ACID semantics**. Rely on database consistency +- ๐Ÿ’ƒ **Static typing**. Compile-time checked and autocompleted queries +- โœจ **Beautiful documentation**. Readable, easy to understand and ever-improving + +Join the [Telegram group](https://t.me/isardb) for discussion and sneak peeks of new versions of the DB. + +If you want to say thank you, star us on GitHub and like us on pub.dev ๐Ÿ™Œ๐Ÿ’™ + +## Quickstart + +Holy smokes you're here! Let's get started on using the coolest Flutter database out there... + +### 1. Add to pubspec.yaml + +```yaml +isar_version: &isar_version 3.1.0 # define the version to be used + +dependencies: + isar: *isar_version + isar_flutter_libs: *isar_version # contains Isar Core + +dev_dependencies: + isar_generator: *isar_version + build_runner: any +``` + +### 2. Annotate a Collection + +```dart +part 'email.g.dart'; + +@collection +class Email { + Id id = Isar.autoIncrement; // you can also use id = null to auto increment + + @Index(type: IndexType.value) + String? title; + + List? recipients; + + @enumerated + Status status = Status.pending; +} + +@embedded +class Recipient { + String? name; + + String? address; +} + +enum Status { + draft, + pending, + sent, +} +``` + +### 3. 
Open a database instance + +```dart +final dir = await getApplicationDocumentsDirectory(); +final isar = await Isar.open( + [EmailSchema], + directory: dir.path, +); +``` + +### 4. Query the database + +```dart +final emails = await isar.emails.filter() + .titleContains('awesome', caseSensitive: false) + .sortByStatusDesc() + .limit(10) + .findAll(); +``` + +## Isar Database Inspector + +The Isar Inspector allows you to inspect the Isar instances & collections of your app in real-time. You can execute queries, edit properties, switch between instances and sort the data. + + + +To launch the inspector, just run your Isar app in debug mode and open the Inspector link in the logs. + +## CRUD operations + +All basic crud operations are available via the `IsarCollection`. + +```dart +final newEmail = Email()..title = 'Amazing new database'; + +await isar.writeTxn(() { + await isar.emails.put(newEmail); // insert & update +}); + +final existingEmail = await isar.emails.get(newEmail.id!); // get + +await isar.writeTxn(() { + await isar.emails.delete(existingEmail.id!); // delete +}); +``` + +## Database Queries + +Isar database has a powerful query language that allows you to make use of your indexes, filter distinct objects, use complex `and()`, `or()` and `.xor()` groups, query links and sort the results. + +```dart +final importantEmails = isar.emails + .where() + .titleStartsWith('Important') // use index + .limit(10) + .findAll() + +final specificEmails = isar.emails + .filter() + .recipient((q) => q.nameEqualTo('David')) // query embedded objects + .or() + .titleMatches('*university*', caseSensitive: false) // title containing 'university' (case insensitive) + .findAll() +``` + +## Database Watchers + +With Isar database, you can watch collections, objects, or queries. A watcher is notified after a transaction commits successfully and the target actually changes. 
+Watchers can be lazy and not reload the data or they can be non-lazy and fetch new results in the background. + +```dart +Stream collectionStream = isar.emails.watchLazy(); + +Stream> queryStream = importantEmails.watch(); + +queryStream.listen((newResult) { + // do UI updates +}) +``` + +## Benchmarks + +Benchmarks only give a rough idea of the performance of a database but as you can see, Isar NoSQL database is quite fast ๐Ÿ˜‡ + +| | | +| ---------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------- | +| | | + +If you are interested in more benchmarks or want to check how Isar performs on your device you can run the [benchmarks](https://github.com/isar/isar_benchmark) yourself. + +## Unit tests + +If you want to use Isar database in unit tests or Dart code, call `await Isar.initializeIsarCore(download: true)` before using Isar in your tests. + +Isar NoSQL database will automatically download the correct binary for your platform. You can also pass a `libraries` map to adjust the download location for each platform. + +Make sure to use `flutter test -j 1` to avoid tests running in parallel. This would break the automatic download. + +## Contributors โœจ + +Big thanks go to these wonderful people: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Alexis

Burak

Carlo Loguercio

Frostedfox

Hafeez Rana

Hamed H.

JT

Jack Rivers

Joachim Nohl

Johnson

LaLucid

Lety

Michael

Moseco

Nelson Mutane

Peyman

Simon Leier

Ura

blendthink

mnkeis

nobkd
+ + + + + + +### License + +``` +Copyright 2022 Simon Leier + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +``` diff --git a/analysis_options.yaml b/analysis_options.yaml new file mode 100644 index 0000000..ee7facb --- /dev/null +++ b/analysis_options.yaml @@ -0,0 +1,11 @@ +include: package:very_good_analysis/analysis_options.yaml + +analyzer: + exclude: + - "lib/src/native/bindings.dart" + + errors: + cascade_invocations: ignore + avoid_positional_boolean_parameters: ignore + parameter_assignments: ignore + prefer_asserts_with_message: ignore \ No newline at end of file diff --git a/example/README.md b/example/README.md new file mode 100644 index 0000000..e43ff5c --- /dev/null +++ b/example/README.md @@ -0,0 +1,3 @@ +## The fastest way to get started is by following the [Quickstart Guide](https://isar.dev/tutorials/quickstart.html)! + +Have fun using Isar! 
\ No newline at end of file diff --git a/lib/isar.dart b/lib/isar.dart new file mode 100644 index 0000000..429086e --- /dev/null +++ b/lib/isar.dart @@ -0,0 +1,49 @@ +library isar; + +import 'dart:async'; +import 'dart:convert'; +import 'dart:developer'; +import 'dart:typed_data'; + +import 'package:isar/src/isar_connect_api.dart'; +import 'package:isar/src/native/isar_core.dart' + if (dart.library.html) 'package:isar/src/web/isar_web.dart'; +import 'package:isar/src/native/isar_link_impl.dart' + if (dart.library.html) 'package:isar/src/web/isar_link_impl.dart'; +import 'package:isar/src/native/open.dart' + if (dart.library.html) 'package:isar/src/web/open.dart'; +import 'package:isar/src/native/split_words.dart' + if (dart.library.html) 'package:isar/src/web/split_words.dart'; +import 'package:meta/meta.dart'; +import 'package:meta/meta_meta.dart'; + +part 'src/annotations/backlink.dart'; +part 'src/annotations/collection.dart'; +part 'src/annotations/embedded.dart'; +part 'src/annotations/enumerated.dart'; +part 'src/annotations/ignore.dart'; +part 'src/annotations/index.dart'; +part 'src/annotations/name.dart'; +part 'src/annotations/type.dart'; +part 'src/isar.dart'; +part 'src/isar_collection.dart'; +part 'src/isar_connect.dart'; +part 'src/isar_error.dart'; +part 'src/isar_link.dart'; +part 'src/isar_reader.dart'; +part 'src/isar_writer.dart'; +part 'src/query.dart'; +part 'src/query_builder.dart'; +part 'src/query_builder_extensions.dart'; +part 'src/query_components.dart'; +part 'src/schema/collection_schema.dart'; +part 'src/schema/index_schema.dart'; +part 'src/schema/link_schema.dart'; +part 'src/schema/property_schema.dart'; +part 'src/schema/schema.dart'; + +/// @nodoc +@protected +typedef IsarUint8List = Uint8List; + +const bool _kIsWeb = identical(0, 0.0); diff --git a/lib/src/annotations/backlink.dart b/lib/src/annotations/backlink.dart new file mode 100644 index 0000000..54b0ab2 --- /dev/null +++ b/lib/src/annotations/backlink.dart @@ -0,0 +1,11 @@ 
+part of isar; + +/// Annotation to create a backlink to an existing link. +@Target({TargetKind.field}) +class Backlink { + /// Annotation to create a backlink to an existing link. + const Backlink({required this.to}); + + /// The Dart name of the target link. + final String to; +} diff --git a/lib/src/annotations/collection.dart b/lib/src/annotations/collection.dart new file mode 100644 index 0000000..979efb0 --- /dev/null +++ b/lib/src/annotations/collection.dart @@ -0,0 +1,34 @@ +part of isar; + +/// Annotation to create an Isar collection. +const collection = Collection(); + +/// Annotation to create an Isar collection. +@Target({TargetKind.classType}) +class Collection { + /// Annotation to create an Isar collection. + const Collection({ + this.inheritance = true, + this.accessor, + this.ignore = const {}, + }); + + /// Should properties and accessors of parent classes and mixins be included? + final bool inheritance; + + /// Allows you to override the default collection accessor. + /// + /// Example: + /// ```dart + /// @Collection(accessor: 'col') + /// class MyCol { + /// Id? id; + /// } + /// + /// // access collection using: isar.col + /// ``` + final String? accessor; + + /// A list of properties or getter names that Isar should ignore. + final Set ignore; +} diff --git a/lib/src/annotations/embedded.dart b/lib/src/annotations/embedded.dart new file mode 100644 index 0000000..d8f5ce9 --- /dev/null +++ b/lib/src/annotations/embedded.dart @@ -0,0 +1,17 @@ +part of isar; + +/// Annotation to nest objects of this type in collections. +const embedded = Embedded(); + +/// Annotation to nest objects of this type in collections. +@Target({TargetKind.classType}) +class Embedded { + /// Annotation to nest objects of this type in collections. + const Embedded({this.inheritance = true, this.ignore = const {}}); + + /// Should properties and accessors of parent classes and mixins be included? 
+ final bool inheritance; + + /// A list of properties or getter names that Isar should ignore. + final Set ignore; +} diff --git a/lib/src/annotations/enumerated.dart b/lib/src/annotations/enumerated.dart new file mode 100644 index 0000000..ade7e5f --- /dev/null +++ b/lib/src/annotations/enumerated.dart @@ -0,0 +1,33 @@ +part of isar; + +/// Annotation to specify how an enum property should be serialized. +const enumerated = Enumerated(EnumType.ordinal); + +/// Annotation to specify how an enum property should be serialized. +@Target({TargetKind.field, TargetKind.getter}) +class Enumerated { + /// Annotation to specify how an enum property should be serialized. + const Enumerated(this.type, [this.property]); + + /// How the enum property should be serialized. + final EnumType type; + + /// The property to use for the enum values. + final String? property; +} + +/// Enum type for enum values. +enum EnumType { + /// Stores the index of the enum as a byte value. + ordinal, + + /// Stores the index of the enum as a 4-byte value. Use this type if your enum + /// has more than 256 values or needs to be nullable. + ordinal32, + + /// Uses the name of the enum value. + name, + + /// Uses a custom enum value. + value +} diff --git a/lib/src/annotations/ignore.dart b/lib/src/annotations/ignore.dart new file mode 100644 index 0000000..181224e --- /dev/null +++ b/lib/src/annotations/ignore.dart @@ -0,0 +1,11 @@ +part of isar; + +/// Annotate a property or accessor in an Isar collection to ignore it. +const ignore = Ignore(); + +/// Annotate a property or accessor in an Isar collection to ignore it. +@Target({TargetKind.field, TargetKind.getter}) +class Ignore { + /// Annotate a property or accessor in an Isar collection to ignore it. 
+ const Ignore(); +} diff --git a/lib/src/annotations/index.dart b/lib/src/annotations/index.dart new file mode 100644 index 0000000..62546a9 --- /dev/null +++ b/lib/src/annotations/index.dart @@ -0,0 +1,76 @@ +part of isar; + +/// Specifies how an index is stored in Isar. +enum IndexType { + /// Stores the value as-is in the index. + value, + + /// Strings or Lists can be hashed to reduce the storage required by the + /// index. The disadvantage of hash indexes is that they can't be used for + /// prefix scans (`startsWith()` where clauses). String and list indexes are + /// hashed by default. + hash, + + /// `List` can hash its elements. + hashElements, +} + +/// Annotate properties to build an index. +@Target({TargetKind.field, TargetKind.getter}) +class Index { + /// Annotate properties to build an index. + const Index({ + this.name, + this.composite = const [], + this.unique = false, + this.replace = false, + this.type, + this.caseSensitive, + }); + + /// Name of the index. By default, the names of the properties are + /// concatenated using "_" + final String? name; + + /// Specify up to two other properties to build a composite index. + final List composite; + + /// A unique index ensures the index does not contain any duplicate values. + /// Any attempt to insert or update data into the unique index that causes a + /// duplicate will result in an error. + final bool unique; + + /// If set to `true`, inserting a duplicate unique value will replace the + /// existing object instead of throwing an error. + final bool replace; + + /// Specifies how an index is stored in Isar. + /// + /// Defaults to: + /// - `IndexType.hash` for `String`s and `List`s + /// - `IndexType.value` for all other types + final IndexType? type; + + /// String or `List` indexes can be case sensitive (default) or case + /// insensitive. + final bool? caseSensitive; +} + +/// Another property that is part of the composite index. 
+class CompositeIndex { + /// Another property that is part of the composite index. + const CompositeIndex( + this.property, { + this.type, + this.caseSensitive, + }); + + /// Dart name of the property. + final String property; + + /// See [Index.type]. + final IndexType? type; + + /// See [Index.caseSensitive]. + final bool? caseSensitive; +} diff --git a/lib/src/annotations/name.dart b/lib/src/annotations/name.dart new file mode 100644 index 0000000..a950ac4 --- /dev/null +++ b/lib/src/annotations/name.dart @@ -0,0 +1,13 @@ +part of isar; + +/// Annotate Isar collections or properties to change their name. +/// +/// Can be used to change the name in Dart independently of Isar. +@Target({TargetKind.classType, TargetKind.field, TargetKind.getter}) +class Name { + /// Annotate Isar collections or properties to change their name. + const Name(this.name); + + /// The name this entity should have in the database. + final String name; +} diff --git a/lib/src/annotations/type.dart b/lib/src/annotations/type.dart new file mode 100644 index 0000000..8732265 --- /dev/null +++ b/lib/src/annotations/type.dart @@ -0,0 +1,20 @@ +// ignore_for_file: camel_case_types + +part of isar; + +/// Type to specify the id property of a collection. +typedef Id = int; + +/// Type to mark an [int] property or List as 8-bit sized. +/// +/// You may only store values between 0 and 255 in such a property. +typedef byte = int; + +/// Type to mark an [int] property or List as 32-bit sized. +/// +/// You may only store values between -2147483648 and 2147483647 in such a +/// property. +typedef short = int; + +/// Type to mark a [double] property or List to have 32-bit precision. 
+typedef float = double; diff --git a/lib/src/common/isar_common.dart b/lib/src/common/isar_common.dart new file mode 100644 index 0000000..ea0873e --- /dev/null +++ b/lib/src/common/isar_common.dart @@ -0,0 +1,222 @@ +// ignore_for_file: invalid_use_of_protected_member + +import 'dart:async'; + +import 'package:isar/isar.dart'; + +const Symbol _zoneTxn = #zoneTxn; + +/// @nodoc +abstract class IsarCommon extends Isar { + /// @nodoc + IsarCommon(super.name); + + final List> _activeAsyncTxns = []; + var _asyncWriteTxnsActive = 0; + + Transaction? _currentTxnSync; + + void _requireNotInTxn() { + if (_currentTxnSync != null || Zone.current[_zoneTxn] != null) { + throw IsarError( + 'Cannot perform this operation from within an active transaction. ' + 'Isar does not support nesting transactions.', + ); + } + } + + /// @nodoc + Future beginTxn(bool write, bool silent); + + Future _beginTxn( + bool write, + bool silent, + Future Function() callback, + ) async { + requireOpen(); + _requireNotInTxn(); + + final completer = Completer(); + _activeAsyncTxns.add(completer.future); + + try { + if (write) { + _asyncWriteTxnsActive++; + } + + final txn = await beginTxn(write, silent); + + final zone = Zone.current.fork( + zoneValues: {_zoneTxn: txn}, + ); + + T result; + try { + result = await zone.run(callback); + await txn.commit(); + } catch (e) { + await txn.abort(); + rethrow; + } finally { + txn.free(); + } + return result; + } finally { + completer.complete(); + _activeAsyncTxns.remove(completer.future); + if (write) { + _asyncWriteTxnsActive--; + } + } + } + + @override + Future txn(Future Function() callback) { + return _beginTxn(false, false, callback); + } + + @override + Future writeTxn(Future Function() callback, {bool silent = false}) { + return _beginTxn(true, silent, callback); + } + + /// @nodoc + Future getTxn( + bool write, + Future Function(T txn) callback, + ) { + final currentTxn = Zone.current[_zoneTxn] as T?; + if (currentTxn != null) { + if 
(!currentTxn.active) { + throw IsarError('Transaction is not active anymore. Make sure to await ' + 'all your asynchronous code within transactions to prevent it from ' + 'being closed prematurely.'); + } else if (write && !currentTxn.write) { + throw IsarError('Operation cannot be performed within a read ' + 'transaction. Use isar.writeTxn() instead.'); + } else if (currentTxn.isar != this) { + throw IsarError('Transaction does not match Isar instance. ' + 'Make sure to use transactions from the same Isar instance.'); + } + return callback(currentTxn); + } else if (!write) { + return _beginTxn(false, false, () { + return callback(Zone.current[_zoneTxn] as T); + }); + } else { + throw IsarError('Write operations require an explicit transaction. ' + 'Wrap your code in isar.writeTxn()'); + } + } + + /// @nodoc + Transaction beginTxnSync(bool write, bool silent); + + T _beginTxnSync(bool write, bool silent, T Function() callback) { + requireOpen(); + _requireNotInTxn(); + + if (write && _asyncWriteTxnsActive > 0) { + throw IsarError( + 'An async write transaction is already in progress in this isolate. ' + 'You cannot begin a sync write transaction until it is finished. 
' + 'Use asynchronous transactions if you want to queue multiple write ' + 'transactions.', + ); + } + + final txn = beginTxnSync(write, silent); + _currentTxnSync = txn; + + T result; + try { + result = callback(); + txn.commitSync(); + } catch (e) { + txn.abortSync(); + rethrow; + } finally { + _currentTxnSync = null; + txn.free(); + } + + return result; + } + + @override + T txnSync(T Function() callback) { + return _beginTxnSync(false, false, callback); + } + + @override + T writeTxnSync(T Function() callback, {bool silent = false}) { + return _beginTxnSync(true, silent, callback); + } + + /// @nodoc + R getTxnSync( + bool write, + R Function(T txn) callback, + ) { + if (_currentTxnSync != null) { + if (write && !_currentTxnSync!.write) { + throw IsarError( + 'Operation cannot be performed within a read transaction. ' + 'Use isar.writeTxnSync() instead.', + ); + } + return callback(_currentTxnSync! as T); + } else if (!write) { + return _beginTxnSync(false, false, () => callback(_currentTxnSync! as T)); + } else { + throw IsarError('Write operations require an explicit transaction. 
' + 'Wrap your code in isar.writeTxnSync()'); + } + } + + @override + Future close({bool deleteFromDisk = false}) async { + requireOpen(); + _requireNotInTxn(); + await Future.wait(_activeAsyncTxns); + await super.close(); + + return performClose(deleteFromDisk); + } + + /// @nodoc + bool performClose(bool deleteFromDisk); +} + +/// @nodoc +abstract class Transaction { + /// @nodoc + Transaction(this.isar, this.sync, this.write); + + /// @nodoc + final Isar isar; + + /// @nodoc + final bool sync; + + /// @nodoc + final bool write; + + /// @nodoc + bool get active; + + /// @nodoc + Future commit(); + + /// @nodoc + void commitSync(); + + /// @nodoc + Future abort(); + + /// @nodoc + void abortSync(); + + /// @nodoc + void free() {} +} diff --git a/lib/src/common/isar_link_base_impl.dart b/lib/src/common/isar_link_base_impl.dart new file mode 100644 index 0000000..1a19192 --- /dev/null +++ b/lib/src/common/isar_link_base_impl.dart @@ -0,0 +1,108 @@ +import 'package:isar/isar.dart'; + +/// @nodoc +abstract class IsarLinkBaseImpl implements IsarLinkBase { + var _initialized = false; + + Id? _objectId; + + /// The isar name of the link + late final String linkName; + + /// The origin collection of the link. For backlinks it is actually the target + /// collection. + late final IsarCollection sourceCollection; + + /// The target collection of the link. For backlinks it is actually the origin + /// collection. + late final IsarCollection targetCollection; + + @override + bool get isAttached => _objectId != null; + + @override + void attach( + IsarCollection sourceCollection, + IsarCollection targetCollection, + String linkName, + Id? objectId, + ) { + if (_initialized) { + if (linkName != this.linkName || + !identical(sourceCollection, this.sourceCollection) || + !identical(targetCollection, this.targetCollection)) { + throw IsarError( + 'Link has been moved! 
It is not allowed to move ' + 'a link to a different collection.', + ); + } + } else { + _initialized = true; + this.sourceCollection = sourceCollection; + this.targetCollection = targetCollection; + this.linkName = linkName; + } + + _objectId = objectId; + } + + /// Returns the containing object's id or throws an exception if this link has + /// not been attached to an object yet. + Id requireAttached() { + if (_objectId == null) { + throw IsarError( + 'Containing object needs to be managed by Isar to use this method. ' + 'Use collection.put(yourObject) to add it to the database.', + ); + } else { + return _objectId!; + } + } + + /// Returns the id of a linked object. + Id Function(OBJ obj) get getId; + + /// Returns the id of a linked object or throws an exception if the id is + /// `null` or set to `Isar.autoIncrement`. + Id requireGetId(OBJ object) { + final id = getId(object); + if (id != Isar.autoIncrement) { + return id; + } else { + throw IsarError( + 'Object "$object" has no id and can therefore not be linked. ' + 'Make sure to .put() objects before you use them in links.', + ); + } + } + + /// See [IsarLinks.filter]. + QueryBuilder filter() { + final containingId = requireAttached(); + final qb = QueryBuilderInternal( + collection: targetCollection, + whereClauses: [ + LinkWhereClause( + linkCollection: sourceCollection.name, + linkName: linkName, + id: containingId, + ), + ], + ); + return QueryBuilder(qb); + } + + /// See [IsarLinks.update]. + Future update({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }); + + /// See [IsarLinks.updateSync]. 
+ void updateSync({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }); +} diff --git a/lib/src/common/isar_link_common.dart b/lib/src/common/isar_link_common.dart new file mode 100644 index 0000000..410ccd3 --- /dev/null +++ b/lib/src/common/isar_link_common.dart @@ -0,0 +1,92 @@ +import 'package:isar/isar.dart'; +import 'package:isar/src/common/isar_link_base_impl.dart'; + +const bool _kIsWeb = identical(0, 0.0); + +/// @nodoc +abstract class IsarLinkCommon extends IsarLinkBaseImpl + with IsarLink { + OBJ? _value; + + @override + bool isChanged = false; + + @override + bool isLoaded = false; + + @override + OBJ? get value { + if (isAttached && !isLoaded && !isChanged && !_kIsWeb) { + loadSync(); + } + return _value; + } + + @override + set value(OBJ? value) { + isChanged |= !identical(_value, value); + _value = value; + isLoaded = true; + } + + @override + Future load() async { + _value = await filter().findFirst(); + isChanged = false; + isLoaded = true; + } + + @override + void loadSync() { + _value = filter().findFirstSync(); + isChanged = false; + isLoaded = true; + } + + @override + Future save() async { + if (!isChanged) { + return; + } + + final object = value; + + await update(link: [if (object != null) object], reset: true); + isChanged = false; + isLoaded = true; + } + + @override + void saveSync() { + if (!isChanged) { + return; + } + + final object = _value; + updateSync(link: [if (object != null) object], reset: true); + + isChanged = false; + isLoaded = true; + } + + @override + Future reset() async { + await update(reset: true); + _value = null; + isChanged = false; + isLoaded = true; + } + + @override + void resetSync() { + updateSync(reset: true); + _value = null; + isChanged = false; + isLoaded = true; + } + + @override + String toString() { + return 'IsarLink($_value)'; + } +} diff --git a/lib/src/common/isar_links_common.dart b/lib/src/common/isar_links_common.dart new file mode 100644 index 0000000..98975b7 
--- /dev/null +++ b/lib/src/common/isar_links_common.dart @@ -0,0 +1,223 @@ +import 'dart:collection'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/common/isar_link_base_impl.dart'; + +const bool _kIsWeb = identical(0, 0.0); + +/// @nodoc +abstract class IsarLinksCommon extends IsarLinkBaseImpl + with IsarLinks, SetMixin { + final _objects = {}; + + /// @nodoc + final addedObjects = HashSet.identity(); + + /// @nodoc + final removedObjects = HashSet.identity(); + + @override + bool isLoaded = false; + + @override + bool get isChanged => addedObjects.isNotEmpty || removedObjects.isNotEmpty; + + Map get _loadedObjects { + if (isAttached && !isLoaded && !_kIsWeb) { + loadSync(); + } + return _objects; + } + + @override + void attach( + IsarCollection sourceCollection, + IsarCollection targetCollection, + String linkName, + Id? objectId, + ) { + super.attach(sourceCollection, targetCollection, linkName, objectId); + + _applyAddedRemoved(); + } + + @override + Future load({bool overrideChanges = false}) async { + final objects = await filter().findAll(); + _applyLoaded(objects, overrideChanges); + } + + @override + void loadSync({bool overrideChanges = false}) { + final objects = filter().findAllSync(); + _applyLoaded(objects, overrideChanges); + } + + void _applyLoaded(List objects, bool overrideChanges) { + _objects.clear(); + for (final object in objects) { + final id = getId(object); + if (id != Isar.autoIncrement) { + _objects[id] = object; + } + } + + if (overrideChanges) { + addedObjects.clear(); + removedObjects.clear(); + } else { + _applyAddedRemoved(); + } + + isLoaded = true; + } + + void _applyAddedRemoved() { + for (final object in addedObjects) { + final id = getId(object); + if (id != Isar.autoIncrement) { + _objects[id] = object; + } + } + + for (final object in removedObjects) { + final id = getId(object); + if (id != Isar.autoIncrement) { + _objects.remove(id); + } + } + } + + @override + Future save() async { + if (!isChanged) { + 
return; + } + + await update(link: addedObjects, unlink: removedObjects); + + addedObjects.clear(); + removedObjects.clear(); + isLoaded = true; + } + + @override + void saveSync() { + if (!isChanged) { + return; + } + + updateSync(link: addedObjects, unlink: removedObjects); + + addedObjects.clear(); + removedObjects.clear(); + isLoaded = true; + } + + @override + Future reset() async { + await update(reset: true); + clear(); + isLoaded = true; + } + + @override + void resetSync() { + updateSync(reset: true); + clear(); + isLoaded = true; + } + + @override + bool add(OBJ value) { + if (isAttached) { + final id = getId(value); + if (id != Isar.autoIncrement) { + if (_objects.containsKey(id)) { + return false; + } + _objects[id] = value; + } + } + + removedObjects.remove(value); + return addedObjects.add(value); + } + + @override + bool contains(Object? element) { + requireAttached(); + + if (element is OBJ) { + final id = getId(element); + if (id != Isar.autoIncrement) { + return _loadedObjects.containsKey(id); + } + } + return false; + } + + @override + Iterator get iterator => _loadedObjects.values.iterator; + + @override + int get length => _loadedObjects.length; + + @override + OBJ? lookup(Object? element) { + requireAttached(); + + if (element is OBJ) { + final id = getId(element); + if (id != Isar.autoIncrement) { + return _loadedObjects[id]; + } + } + return null; + } + + @override + bool remove(Object? value) { + if (value is! 
OBJ) { + return false; + } + + if (isAttached) { + final id = getId(value); + if (id != Isar.autoIncrement) { + if (isLoaded && !_objects.containsKey(id)) { + return false; + } + _objects.remove(id); + } + } + + addedObjects.remove(value); + return removedObjects.add(value); + } + + @override + Set toSet() { + requireAttached(); + return HashSet( + equals: (o1, o2) => getId(o1) == getId(o2), + // ignore: noop_primitive_operations + hashCode: (o) => getId(o).toInt(), + isValidKey: (o) => o is OBJ && getId(o) != Isar.autoIncrement, + )..addAll(_loadedObjects.values); + } + + @override + void clear() { + _objects.clear(); + addedObjects.clear(); + removedObjects.clear(); + } + + @override + String toString() { + final content = + IterableBase.iterableToFullString(_objects.values, '{', '}'); + return 'IsarLinks($content)'; + } +} diff --git a/lib/src/common/schemas.dart b/lib/src/common/schemas.dart new file mode 100644 index 0000000..06085d8 --- /dev/null +++ b/lib/src/common/schemas.dart @@ -0,0 +1,13 @@ +import 'package:isar/isar.dart'; + +/// @nodoc +List> getSchemas( + List> collectionSchemas, +) { + final schemas = >{}; + for (final collectionSchema in collectionSchemas) { + schemas.add(collectionSchema); + schemas.addAll(collectionSchema.embeddedSchemas.values); + } + return schemas.toList(); +} diff --git a/lib/src/isar.dart b/lib/src/isar.dart new file mode 100644 index 0000000..a67ed71 --- /dev/null +++ b/lib/src/isar.dart @@ -0,0 +1,347 @@ +part of isar; + +/// Callback for a newly opened Isar instance. +typedef IsarOpenCallback = void Function(Isar isar); + +/// Callback for a release Isar instance. +typedef IsarCloseCallback = void Function(String isarName); + +/// An instance of the Isar Database. +abstract class Isar { + /// @nodoc + @protected + Isar(this.name) { + _instances[name] = this; + for (final callback in _openCallbacks) { + callback(this); + } + } + + /// The version of the Isar library. 
+ static const version = '3.1.0+1'; + + /// Smallest valid id. + static const Id minId = isarMinId; + + /// Largest valid id. + static const Id maxId = isarMaxId; + + /// The default Isar instance name. + static const String defaultName = 'default'; + + /// The default max Isar size. + static const int defaultMaxSizeMiB = 1024; + + /// Placeholder for an auto-increment id. + static const Id autoIncrement = isarAutoIncrementId; + + static final Map _instances = {}; + static final Set _openCallbacks = {}; + static final Set _closeCallbacks = {}; + + /// Name of the instance. + final String name; + + /// The directory containing the database file or `null` on the web. + String? get directory; + + /// The full path of the database file is `directory/name.isar` and the lock + /// file `directory/name.isar.lock`. + String? get path => directory != null ? '$directory/$name.isar' : null; + + late final Map> _collections; + late final Map> _collectionsByName; + + bool _isOpen = true; + + static void _checkOpen(String name, List> schemas) { + if (name.isEmpty || name.startsWith('_')) { + throw IsarError('Instance names must not be empty or start with "_".'); + } + if (_instances.containsKey(name)) { + throw IsarError('Instance has already been opened.'); + } + if (schemas.isEmpty) { + throw IsarError('At least one collection needs to be opened.'); + } + + final schemaNames = {}; + for (final schema in schemas) { + if (!schemaNames.add(schema.name)) { + throw IsarError('Duplicate collection ${schema.name}.'); + } + } + for (final schema in schemas) { + final dependencies = schema.links.values.map((e) => e.target); + for (final dependency in dependencies) { + if (!schemaNames.contains(dependency)) { + throw IsarError( + "Collection ${schema.name} depends on $dependency but its schema " + 'was not provided.', + ); + } + } + } + } + + /// Open a new Isar instance. 
+ static Future open( + List> schemas, { + required String directory, + String name = defaultName, + int maxSizeMiB = Isar.defaultMaxSizeMiB, + bool relaxedDurability = true, + CompactCondition? compactOnLaunch, + bool inspector = true, + }) { + _checkOpen(name, schemas); + + /// Tree shake the inspector for profile and release builds. + assert(() { + if (!_kIsWeb && inspector) { + _IsarConnect.initialize(schemas); + } + return true; + }()); + + return openIsar( + schemas: schemas, + directory: directory, + name: name, + maxSizeMiB: maxSizeMiB, + relaxedDurability: relaxedDurability, + compactOnLaunch: compactOnLaunch, + ); + } + + /// Open a new Isar instance. + static Isar openSync( + List> schemas, { + required String directory, + String name = defaultName, + int maxSizeMiB = Isar.defaultMaxSizeMiB, + bool relaxedDurability = true, + CompactCondition? compactOnLaunch, + bool inspector = true, + }) { + _checkOpen(name, schemas); + + /// Tree shake the inspector for profile and release builds. + assert(() { + if (!_kIsWeb && inspector) { + _IsarConnect.initialize(schemas); + } + return true; + }()); + + return openIsarSync( + schemas: schemas, + directory: directory, + name: name, + maxSizeMiB: maxSizeMiB, + relaxedDurability: relaxedDurability, + compactOnLaunch: compactOnLaunch, + ); + } + + /// Is the instance open? + bool get isOpen => _isOpen; + + /// @nodoc + @protected + void requireOpen() { + if (!isOpen) { + throw IsarError('Isar instance has already been closed'); + } + } + + /// Executes an asynchronous read-only transaction. + Future txn(Future Function() callback); + + /// Executes an asynchronous read-write transaction. + /// + /// If [silent] is `true`, watchers are not notified about changes in this + /// transaction. + Future writeTxn(Future Function() callback, {bool silent = false}); + + /// Executes a synchronous read-only transaction. + T txnSync(T Function() callback); + + /// Executes a synchronous read-write transaction. 
+ /// + /// If [silent] is `true`, watchers are not notified about changes in this + /// transaction. + T writeTxnSync(T Function() callback, {bool silent = false}); + + /// @nodoc + @protected + void attachCollections(Map> collections) { + _collections = collections; + _collectionsByName = { + for (IsarCollection col in collections.values) col.name: col, + }; + } + + /// Get a collection by its type. + /// + /// You should use the generated extension methods instead. + IsarCollection collection() { + requireOpen(); + final collection = _collections[T]; + if (collection == null) { + throw IsarError('Missing ${T.runtimeType}Schema in Isar.open'); + } + return collection as IsarCollection; + } + + /// @nodoc + @protected + IsarCollection? getCollectionByNameInternal(String name) { + return _collectionsByName[name]; + } + + /// Remove all data in this instance and reset the auto increment values. + Future clear() async { + for (final col in _collections.values) { + await col.clear(); + } + } + + /// Remove all data in this instance and reset the auto increment values. + void clearSync() { + for (final col in _collections.values) { + col.clearSync(); + } + } + + /// Returns the size of all the collections in bytes. Not supported on web. + /// + /// This method is extremely fast and independent of the number of objects in + /// the instance. + Future getSize({bool includeIndexes = false, bool includeLinks = false}); + + /// Returns the size of all collections in bytes. Not supported on web. + /// + /// This method is extremely fast and independent of the number of objects in + /// the instance. + int getSizeSync({bool includeIndexes = false, bool includeLinks = false}); + + /// Copy a compacted version of the database to the specified file. + /// + /// If you want to backup your database, you should always use a compacted + /// version. Compacted does not mean compressed. 
+ /// + /// Do not run this method while other transactions are active to avoid + /// unnecessary growth of the database. + Future copyToFile(String targetPath); + + /// Releases an Isar instance. + /// + /// If this is the only isolate that holds a reference to this instance, the + /// Isar instance will be closed. [deleteFromDisk] additionally removes all + /// database files if enabled. + /// + /// Returns whether the instance was actually closed. + Future close({bool deleteFromDisk = false}) { + requireOpen(); + _isOpen = false; + if (identical(_instances[name], this)) { + _instances.remove(name); + } + for (final callback in _closeCallbacks) { + callback(name); + } + return Future.value(false); + } + + /// Verifies the integrity of the database file. + /// + /// Do not use this method in production apps. + @visibleForTesting + @experimental + Future verify(); + + /// A list of all Isar instances opened in the current isolate. + static Set get instanceNames => _instances.keys.toSet(); + + /// Returns an Isar instance opened in the current isolate by its name. If + /// no name is provided, the default instance is returned. + static Isar? getInstance([String name = defaultName]) { + return _instances[name]; + } + + /// Registers a listener that is called whenever an Isar instance is opened. + static void addOpenListener(IsarOpenCallback callback) { + _openCallbacks.add(callback); + } + + /// Removes a previously registered `IsarOpenCallback`. + static void removeOpenListener(IsarOpenCallback callback) { + _openCallbacks.remove(callback); + } + + /// Registers a listener that is called whenever an Isar instance is + /// released. + static void addCloseListener(IsarCloseCallback callback) { + _closeCallbacks.add(callback); + } + + /// Removes a previously registered `IsarOpenCallback`. + static void removeCloseListener(IsarCloseCallback callback) { + _closeCallbacks.remove(callback); + } + + /// Initialize Isar Core manually. 
You need to provide Isar Core libraries + /// for every platform your app will run on. + /// + /// If [download] is `true`, Isar will attempt to download the correct + /// library and place it in the specified path or the script directory. + /// + /// Be careful if multiple unit tests try to download the library at the + /// same time. Always use `flutter test -j 1` when you rely on auto + /// downloading to ensure that only one test is running at a time. + /// + /// Only use this method for non-Flutter code or unit tests. + static Future initializeIsarCore({ + Map libraries = const {}, + bool download = false, + }) async { + await initializeCoreBinary( + libraries: libraries, + download: download, + ); + } + + /// Split a String into words according to Unicode Annex #29. Only words + /// containing at least one alphanumeric character will be included. + static List splitWords(String input) => isarSplitWords(input); +} + +/// Isar databases can contain unused space that will be reused for later +/// operations. You can specify conditions to trigger manual compaction where +/// the entire database is copied and unused space freed. +/// +/// This operation can only be performed while a database is being opened and +/// should only be used if absolutely necessary. +class CompactCondition { + /// Compaction will happen if all of the specified conditions are true. + const CompactCondition({ + this.minFileSize, + this.minBytes, + this.minRatio, + }) : assert( + minFileSize != null || minBytes != null || minRatio != null, + 'At least one condition needs to be specified.', + ); + + /// The minimum size in bytes of the database file to trigger compaction. It + /// is highly discouraged to trigger compaction solely on this condition. + final int? minFileSize; + + /// The minimum number of bytes that can be freed with compaction. + final int? minBytes; + + /// The minimum compaction ration. 
For example `2.0` would trigger compaction + /// as soon as the file size can be halved. + final double? minRatio; +} diff --git a/lib/src/isar_collection.dart b/lib/src/isar_collection.dart new file mode 100644 index 0000000..2d2b814 --- /dev/null +++ b/lib/src/isar_collection.dart @@ -0,0 +1,342 @@ +part of isar; + +/// Normal keys consist of a single object, composite keys multiple. +typedef IndexKey = List; + +/// Use `IsarCollection` instances to find, query, and create new objects of a +/// given type in Isar. +/// +/// You can get an instance of `IsarCollection` by calling `isar.get()` or +/// by using the generated `isar.yourCollections` getter. +abstract class IsarCollection { + /// The corresponding Isar instance. + Isar get isar; + + /// Get the schema of the collection. + CollectionSchema get schema; + + /// The name of the collection. + String get name => schema.name; + + /// {@template col_get} + /// Get a single object by its [id] or `null` if the object does not exist. + /// {@endtemplate} + Future get(Id id) { + return getAll([id]).then((List objects) => objects[0]); + } + + /// {@macro col_get} + OBJ? getSync(Id id) { + return getAllSync([id])[0]; + } + + /// {@template col_get_all} + /// Get a list of objects by their [ids] or `null` if an object does not + /// exist. + /// {@endtemplate} + Future> getAll(List ids); + + /// {@macro col_get_all} + List getAllSync(List ids); + + /// {@template col_get_by_index} + /// Get a single object by the unique index [indexName] and [key]. + /// + /// Returns `null` if the object does not exist. + /// + /// If possible, you should use the generated type-safe methods instead. + /// {@endtemplate} + @experimental + Future getByIndex(String indexName, IndexKey key) { + return getAllByIndex(indexName, [key]) + .then((List objects) => objects[0]); + } + + /// {@macro col_get_by_index} + @experimental + OBJ? 
getByIndexSync(String indexName, IndexKey key) { + return getAllByIndexSync(indexName, [key])[0]; + } + + /// {@template col_get_all_by_index} + /// Get a list of objects by the unique index [indexName] and [keys]. + /// + /// Returns `null` if the object does not exist. + /// + /// If possible, you should use the generated type-safe methods instead. + /// {@endtemplate} + @experimental + Future> getAllByIndex(String indexName, List keys); + + /// {@macro col_get_all_by_index}' + @experimental + List getAllByIndexSync(String indexName, List keys); + + /// {@template col_put} + /// Insert or update an [object]. Returns the id of the new or updated object. + /// + /// If the object has an non-final id property, it will be set to the assigned + /// id. Otherwise you should use the returned id to update the object. + /// {@endtemplate} + Future put(OBJ object) { + return putAll([object]).then((List ids) => ids[0]); + } + + /// {@macro col_put} + Id putSync(OBJ object, {bool saveLinks = true}) { + return putAllSync([object], saveLinks: saveLinks)[0]; + } + + /// {@template col_put_all} + /// Insert or update a list of [objects]. Returns the list of ids of the new + /// or updated objects. + /// + /// If the objects have an non-final id property, it will be set to the + /// assigned id. Otherwise you should use the returned ids to update the + /// objects. + /// {@endtemplate} + Future> putAll(List objects); + + /// {@macro col_put_all} + List putAllSync(List objects, {bool saveLinks = true}); + + /// {@template col_put_by_index} + /// Insert or update the [object] by the unique index [indexName]. Returns the + /// id of the new or updated object. + /// + /// If there is already an object with the same index key, it will be + /// updated and all links will be preserved. Otherwise a new object will be + /// inserted. + /// + /// If the object has an non-final id property, it will be set to the assigned + /// id. 
Otherwise you should use the returned id to update the object. + /// + /// If possible, you should use the generated type-safe methods instead. + /// {@endtemplate} + @experimental + Future putByIndex(String indexName, OBJ object) { + return putAllByIndex(indexName, [object]).then((List ids) => ids[0]); + } + + /// {@macro col_put_by_index} + @experimental + Id putByIndexSync(String indexName, OBJ object, {bool saveLinks = true}) { + return putAllByIndexSync(indexName, [object])[0]; + } + + /// {@template col_put_all_by_index} + /// Insert or update a list of [objects] by the unique index [indexName]. + /// Returns the list of ids of the new or updated objects. + /// + /// If there is already an object with the same index key, it will be + /// updated and all links will be preserved. Otherwise a new object will be + /// inserted. + /// + /// If the objects have an non-final id property, it will be set to the + /// assigned id. Otherwise you should use the returned ids to update the + /// objects. + /// + /// If possible, you should use the generated type-safe methods instead. + /// {@endtemplate} + @experimental + Future> putAllByIndex(String indexName, List objects); + + /// {@macro col_put_all_by_index} + @experimental + List putAllByIndexSync( + String indexName, + List objects, { + bool saveLinks = true, + }); + + /// {@template col_delete} + /// Delete a single object by its [id]. + /// + /// Returns whether the object has been deleted. Isar web always returns + /// `true`. + /// {@endtemplate} + Future delete(Id id) { + return deleteAll([id]).then((int count) => count == 1); + } + + /// {@macro col_delete} + bool deleteSync(Id id) { + return deleteAllSync([id]) == 1; + } + + /// {@template col_delete_all} + /// Delete a list of objects by their [ids]. + /// + /// Returns the number of objects that have been deleted. Isar web always + /// returns `ids.length`. 
+ /// {@endtemplate} + Future deleteAll(List ids); + + /// {@macro col_delete_all} + int deleteAllSync(List ids); + + /// {@template col_delete_by_index} + /// Delete a single object by the unique index [indexName] and [key]. + /// + /// Returns whether the object has been deleted. Isar web always returns + /// `true`. + /// {@endtemplate} + @experimental + Future deleteByIndex(String indexName, IndexKey key) { + return deleteAllByIndex(indexName, [key]).then((int count) => count == 1); + } + + /// {@macro col_delete_by_index} + @experimental + bool deleteByIndexSync(String indexName, IndexKey key) { + return deleteAllByIndexSync(indexName, [key]) == 1; + } + + /// {@template col_delete_all_by_index} + /// Delete a list of objects by the unique index [indexName] and [keys]. + /// + /// Returns the number of objects that have been deleted. Isar web always + /// returns `keys.length`. + /// {@endtemplate} + @experimental + Future deleteAllByIndex(String indexName, List keys); + + /// {@macro col_delete_all_by_index} + @experimental + int deleteAllByIndexSync(String indexName, List keys); + + /// {@template col_clear} + /// Remove all data in this collection and reset the auto increment value. + /// {@endtemplate} + Future clear(); + + /// {@macro col_clear} + void clearSync(); + + /// {@template col_import_json_raw} + /// Import a list of json objects encoded as a byte array. + /// + /// The json objects must have the same structure as the objects in this + /// collection. Otherwise an exception will be thrown. + /// {@endtemplate} + Future importJsonRaw(Uint8List jsonBytes); + + /// {@macro col_import_json_raw} + void importJsonRawSync(Uint8List jsonBytes); + + /// {@template col_import_json} + /// Import a list of json objects. + /// + /// The json objects must have the same structure as the objects in this + /// collection. Otherwise an exception will be thrown. 
+ /// {@endtemplate} + Future importJson(List> json); + + /// {@macro col_import_json} + void importJsonSync(List> json); + + /// Start building a query using the [QueryBuilder]. + /// + /// You can use where clauses to only return [distinct] results. If you want + /// to reverse the order, set [sort] to [Sort.desc]. + QueryBuilder where({ + bool distinct = false, + Sort sort = Sort.asc, + }) { + final qb = QueryBuilderInternal( + collection: this, + whereDistinct: distinct, + whereSort: sort, + ); + return QueryBuilder(qb); + } + + /// Start building a query using the [QueryBuilder]. + /// + /// Shortcut if you don't want to use where clauses. + QueryBuilder filter() => where().filter(); + + /// Build a query dynamically for example to build a custom query language. + /// + /// It is highly discouraged to use this method. Only in very special cases + /// should it be used. If you open an issue please always mention that you + /// used this method. + /// + /// The type argument [R] needs to be equal to [OBJ] if no [property] is + /// specified. Otherwise it should be the type of the property. + @experimental + Query buildQuery({ + List whereClauses = const [], + bool whereDistinct = false, + Sort whereSort = Sort.asc, + FilterOperation? filter, + List sortBy = const [], + List distinctBy = const [], + int? offset, + int? limit, + String? property, + }); + + /// {@template col_count} + /// Returns the total number of objects in this collection. + /// + /// For non-web apps, this method is extremely fast and independent of the + /// number of objects in the collection. + /// {@endtemplate} + Future count(); + + /// {@macro col_count} + int countSync(); + + /// {@template col_size} + /// Returns the size of the collection in bytes. Not supported on web. + /// + /// For non-web apps, this method is extremely fast and independent of the + /// number of objects in the collection. 
+ /// {@endtemplate} + Future getSize({bool includeIndexes = false, bool includeLinks = false}); + + /// {@macro col_size} + int getSizeSync({bool includeIndexes = false, bool includeLinks = false}); + + /// Watch the collection for changes. + /// + /// If [fireImmediately] is `true`, an event will be fired immediately. + Stream watchLazy({bool fireImmediately = false}); + + /// Watch the object with [id] for changes. If a change occurs, the new object + /// will be returned in the stream. + /// + /// Objects that don't exist (yet) can also be watched. If [fireImmediately] + /// is `true`, the object will be sent to the consumer immediately. + Stream watchObject(Id id, {bool fireImmediately = false}); + + /// Watch the object with [id] for changes. + /// + /// If [fireImmediately] is `true`, an event will be fired immediately. + Stream watchObjectLazy(Id id, {bool fireImmediately = false}); + + /// Verifies the integrity of the collection and its indexes. + /// + /// Throws an exception if the collection does not contain exactly the + /// provided [objects]. + /// + /// Do not use this method in production apps. + @visibleForTesting + @experimental + Future verify(List objects); + + /// Verifies the integrity of a link. + /// + /// Throws an exception if not exactly [sourceIds] as linked to the + /// [targetIds]. + /// + /// Do not use this method in production apps. 
+ @visibleForTesting + @experimental + Future verifyLink( + String linkName, + List sourceIds, + List targetIds, + ); +} diff --git a/lib/src/isar_connect.dart b/lib/src/isar_connect.dart new file mode 100644 index 0000000..b6eee88 --- /dev/null +++ b/lib/src/isar_connect.dart @@ -0,0 +1,263 @@ +// coverage:ignore-file +// ignore_for_file: avoid_print + +part of isar; + +abstract class _IsarConnect { + static const Map Function(Map _)> _handlers = { + ConnectAction.getSchema: _getSchema, + ConnectAction.listInstances: _listInstances, + ConnectAction.watchInstance: _watchInstance, + ConnectAction.executeQuery: _executeQuery, + ConnectAction.removeQuery: _removeQuery, + ConnectAction.importJson: _importJson, + ConnectAction.exportJson: _exportJson, + ConnectAction.editProperty: _editProperty, + }; + + static List>? _schemas; + + // ignore: cancel_subscriptions + static final _querySubscription = >[]; + static final List> _collectionSubscriptions = + >[]; + + static void initialize(List> schemas) { + if (_schemas != null) { + return; + } + _schemas = schemas; + + Isar.addOpenListener((_) { + postEvent(ConnectEvent.instancesChanged.event, {}); + }); + + Isar.addCloseListener((_) { + postEvent(ConnectEvent.instancesChanged.event, {}); + }); + + for (final handler in _handlers.entries) { + registerExtension(handler.key.method, + (String method, Map parameters) async { + try { + final args = parameters.containsKey('args') + ? jsonDecode(parameters['args']!) 
as Map + : {}; + final result = {'result': await handler.value(args)}; + return ServiceExtensionResponse.result(jsonEncode(result)); + } catch (e) { + return ServiceExtensionResponse.error( + ServiceExtensionResponse.extensionError, + e.toString(), + ); + } + }); + } + + _printConnection(); + } + + static void _printConnection() { + Service.getInfo().then((ServiceProtocolInfo info) { + final serviceUri = info.serverUri; + if (serviceUri == null) { + return; + } + final port = serviceUri.port; + var path = serviceUri.path; + if (path.endsWith('/')) { + path = path.substring(0, path.length - 1); + } + if (path.endsWith('=')) { + path = path.substring(0, path.length - 1); + } + final url = ' https://inspect.isar.dev/${Isar.version}/#/$port$path '; + String line(String text, String fill) { + final fillCount = url.length - text.length; + final left = List.filled(fillCount ~/ 2, fill); + final right = List.filled(fillCount - left.length, fill); + return left.join() + text + right.join(); + } + + print('โ•”${line('', 'โ•')}โ•—'); + print('โ•‘${line('ISAR CONNECT STARTED', ' ')}โ•‘'); + print('โ•Ÿ${line('', 'โ”€')}โ•ข'); + print('โ•‘${line('Open the link to connect to the Isar', ' ')}โ•‘'); + print('โ•‘${line('Inspector while this build is running.', ' ')}โ•‘'); + print('โ•Ÿ${line('', 'โ”€')}โ•ข'); + print('โ•‘$urlโ•‘'); + print('โ•š${line('', 'โ•')}โ•'); + }); + } + + static Future _getSchema(Map _) async { + return _schemas!.map((e) => e.toJson()).toList(); + } + + static Future _listInstances(Map _) async { + return Isar.instanceNames.toList(); + } + + static Future _watchInstance(Map params) async { + for (final sub in _collectionSubscriptions) { + unawaited(sub.cancel()); + } + + _collectionSubscriptions.clear(); + if (params.isEmpty) { + return true; + } + + final instanceName = params['instance'] as String; + final instance = Isar.getInstance(instanceName)!; + + for (final collection in instance._collections.values) { + final sub = 
collection.watchLazy(fireImmediately: true).listen((_) { + _sendCollectionInfo(collection); + }); + _collectionSubscriptions.add(sub); + } + + return true; + } + + static void _sendCollectionInfo(IsarCollection collection) { + final count = collection.countSync(); + final size = collection.getSizeSync( + includeIndexes: true, + includeLinks: true, + ); + final collectionInfo = ConnectCollectionInfo( + instance: collection.isar.name, + collection: collection.name, + size: size, + count: count, + ); + postEvent( + ConnectEvent.collectionInfoChanged.event, + collectionInfo.toJson(), + ); + } + + static Future> _executeQuery( + Map params, + ) async { + for (final sub in _querySubscription) { + unawaited(sub.cancel()); + } + _querySubscription.clear(); + + final cQuery = ConnectQuery.fromJson(params); + final instance = Isar.getInstance(cQuery.instance)!; + + final links = + _schemas!.firstWhere((e) => e.name == cQuery.collection).links.values; + + final query = cQuery.toQuery(); + params.remove('limit'); + params.remove('offset'); + final countQuery = ConnectQuery.fromJson(params).toQuery(); + + _querySubscription.add( + query.watchLazy().listen((_) { + postEvent(ConnectEvent.queryChanged.event, {}); + }), + ); + final subscribed = {cQuery.collection}; + for (final link in links) { + if (subscribed.add(link.target)) { + final target = instance.getCollectionByNameInternal(link.target)!; + _querySubscription.add( + target.watchLazy().listen((_) { + postEvent(ConnectEvent.queryChanged.event, {}); + }), + ); + } + } + + final objects = await query.exportJson(); + if (links.isNotEmpty) { + final source = instance.getCollectionByNameInternal(cQuery.collection)!; + for (final object in objects) { + for (final link in links) { + final target = instance.getCollectionByNameInternal(link.target)!; + final links = await target.buildQuery( + whereClauses: [ + LinkWhereClause( + linkCollection: source.name, + linkName: link.name, + id: object[source.schema.idName] as int, + ), + ], 
+ limit: link.single ? 1 : null, + ).exportJson(); + + if (link.single) { + object[link.name] = links.isEmpty ? null : links.first; + } else { + object[link.name] = links; + } + } + } + } + + return { + 'objects': objects, + 'count': await countQuery.count(), + }; + } + + static Future _removeQuery(Map params) async { + final query = ConnectQuery.fromJson(params).toQuery(); + await query.isar.writeTxn(query.deleteAll); + return true; + } + + static Future _importJson(Map params) async { + final instance = Isar.getInstance(params['instance'] as String)!; + final collection = + instance.getCollectionByNameInternal(params['collection'] as String)!; + final objects = (params['objects'] as List).cast>(); + await instance.writeTxn(() async { + await collection.importJson(objects); + }); + } + + static Future> _exportJson(Map params) async { + final query = ConnectQuery.fromJson(params).toQuery(); + return query.exportJson(); + } + + static Future _editProperty(Map params) async { + final cEdit = ConnectEdit.fromJson(params); + final isar = Isar.getInstance(cEdit.instance)!; + final collection = isar.getCollectionByNameInternal(cEdit.collection)!; + final keys = cEdit.path.split('.'); + + final query = collection.buildQuery( + whereClauses: [IdWhereClause.equalTo(value: cEdit.id)], + ); + + final objects = await query.exportJson(); + if (objects.isNotEmpty) { + dynamic object = objects.first; + for (var i = 0; i < keys.length; i++) { + if (i == keys.length - 1 && object is Map) { + object[keys[i]] = cEdit.value; + } else if (object is Map) { + object = object[keys[i]]; + } else if (object is List) { + object = object[int.parse(keys[i])]; + } + } + try { + await isar.writeTxn(() async { + await collection.importJson(objects); + }); + } catch (e) { + print(e); + } + } + } +} diff --git a/lib/src/isar_connect_api.dart b/lib/src/isar_connect_api.dart new file mode 100644 index 0000000..4f18287 --- /dev/null +++ b/lib/src/isar_connect_api.dart @@ -0,0 +1,215 @@ +// 
coverage:ignore-file +// ignore_for_file: public_member_api_docs + +import 'package:isar/isar.dart'; + +enum ConnectAction { + getSchema('ext.isar.getSchema'), + listInstances('ext.isar.listInstances'), + watchInstance('ext.isar.watchInstance'), + executeQuery('ext.isar.executeQuery'), + removeQuery('ext.isar.removeQuery'), + importJson('ext.isar.importJson'), + exportJson('ext.isar.exportJson'), + editProperty('ext.isar.editProperty'); + + const ConnectAction(this.method); + + final String method; +} + +enum ConnectEvent { + instancesChanged('isar.instancesChanged'), + queryChanged('isar.queryChanged'), + collectionInfoChanged('isar.collectionInfoChanged'); + + const ConnectEvent(this.event); + + final String event; +} + +class ConnectQuery { + ConnectQuery({ + required this.instance, + required this.collection, + this.filter, + this.offset, + this.limit, + this.sortProperty, + this.sortAsc, + }); + + factory ConnectQuery.fromJson(Map json) { + return ConnectQuery( + instance: json['instance'] as String, + collection: json['collection'] as String, + filter: _filterFromJson(json['filter'] as Map?), + offset: json['offset'] as int?, + limit: json['limit'] as int?, + sortProperty: json['sortProperty'] as String?, + sortAsc: json['sortAsc'] as bool?, + ); + } + + final String instance; + final String collection; + final FilterOperation? filter; + final int? offset; + final int? limit; + final String? sortProperty; + final bool? sortAsc; + + Map toJson() { + return { + 'instance': instance, + 'collection': collection, + if (filter != null) 'filter': _filterToJson(filter!), + if (offset != null) 'offset': offset, + if (limit != null) 'limit': limit, + if (sortProperty != null) 'sortProperty': sortProperty, + if (sortAsc != null) 'sortAsc': sortAsc, + }; + } + + static FilterOperation? _filterFromJson(Map? 
json) { + if (json == null) { + return null; + } + if (json.containsKey('filters')) { + final filters = (json['filters'] as List) + .map((e) => _filterFromJson(e as Map?)!) + .toList(); + return FilterGroup( + type: FilterGroupType.values[json['type'] as int], + filters: filters, + ); + } else { + return FilterCondition( + type: FilterConditionType.values[json['type'] as int], + property: json['property'] as String, + value1: json['value1'], + value2: json['value2'], + include1: json['include1'] as bool, + include2: json['include2'] as bool, + caseSensitive: json['caseSensitive'] as bool, + ); + } + } + + static Map _filterToJson(FilterOperation filter) { + if (filter is FilterCondition) { + return { + 'type': filter.type.index, + 'property': filter.property, + 'value1': filter.value1, + 'value2': filter.value2, + 'include1': filter.include1, + 'include2': filter.include2, + 'caseSensitive': filter.caseSensitive, + }; + } else if (filter is FilterGroup) { + return { + 'type': filter.type.index, + 'filters': filter.filters.map(_filterToJson).toList(), + }; + } else { + throw UnimplementedError(); + } + } + + Query toQuery() { + final isar = Isar.getInstance(instance)!; + // ignore: invalid_use_of_protected_member + final collection = isar.getCollectionByNameInternal(this.collection)!; + WhereClause? whereClause; + var whereSort = Sort.asc; + + SortProperty? sortProperty; + if (this.sortProperty != null) { + if (this.sortProperty == collection.schema.idName) { + whereClause = const IdWhereClause.any(); + whereSort = sortAsc == true ? Sort.asc : Sort.desc; + } else { + sortProperty = SortProperty( + property: this.sortProperty!, + sort: sortAsc == true ? 
Sort.asc : Sort.desc, + ); + } + } + return collection.buildQuery( + whereClauses: [if (whereClause != null) whereClause], + whereSort: whereSort, + filter: filter, + offset: offset, + limit: limit, + sortBy: [if (sortProperty != null) sortProperty], + ); + } +} + +class ConnectEdit { + ConnectEdit({ + required this.instance, + required this.collection, + required this.id, + required this.path, + required this.value, + }); + + factory ConnectEdit.fromJson(Map json) { + return ConnectEdit( + instance: json['instance'] as String, + collection: json['collection'] as String, + id: json['id'] as Id, + path: json['path'] as String, + value: json['value'], + ); + } + + final String instance; + final String collection; + final Id id; + final String path; + final dynamic value; + + Map toJson() { + return { + 'instance': instance, + 'collection': collection, + 'id': id, + 'path': path, + 'value': value, + }; + } +} + +class ConnectCollectionInfo { + ConnectCollectionInfo({ + required this.instance, + required this.collection, + required this.size, + required this.count, + }); + + factory ConnectCollectionInfo.fromJson(Map json) { + return ConnectCollectionInfo( + instance: json['instance'] as String, + collection: json['collection'] as String, + size: json['size'] as int, + count: json['count'] as int, + ); + } + final String instance; + final String collection; + final int size; + final int count; + + Map toJson() { + return { + 'instance': instance, + 'collection': collection, + 'size': size, + 'count': count, + }; + } +} diff --git a/lib/src/isar_error.dart b/lib/src/isar_error.dart new file mode 100644 index 0000000..18f92e1 --- /dev/null +++ b/lib/src/isar_error.dart @@ -0,0 +1,23 @@ +part of isar; + +/// An error raised by Isar. 
+class IsarError extends Error { + /// @nodoc + @protected + IsarError(this.message); + + /// The message + final String message; + + @override + String toString() { + return 'IsarError: $message'; + } +} + +/// This error is returned when a unique index constraint is violated. +class IsarUniqueViolationError extends IsarError { + /// @nodoc + @protected + IsarUniqueViolationError() : super('Unique index violated'); +} diff --git a/lib/src/isar_link.dart b/lib/src/isar_link.dart new file mode 100644 index 0000000..d589383 --- /dev/null +++ b/lib/src/isar_link.dart @@ -0,0 +1,113 @@ +part of isar; + +/// @nodoc +@sealed +abstract class IsarLinkBase { + /// Is the containing object managed by Isar? + bool get isAttached; + + /// Have the contents been changed? If not, `.save()` is a no-op. + bool get isChanged; + + /// Has this link been loaded? + bool get isLoaded; + + /// {@template link_load} + /// Loads the linked object(s) from the database + /// {@endtemplate} + Future load(); + + /// {@macro link_load} + void loadSync(); + + /// {@template link_save} + /// Saves the linked object(s) to the database if there are changes. + /// + /// Also puts new objects into the database that have id set to `null` or + /// `Isar.autoIncrement`. + /// {@endtemplate} + Future save(); + + /// {@macro link_save} + void saveSync(); + + /// {@template link_reset} + /// Unlinks all linked object(s). + /// + /// You can even call this method on links that have not been loaded yet. + /// {@endtemplate} + Future reset(); + + /// {@macro link_reset} + void resetSync(); + + /// @nodoc + @protected + void attach( + IsarCollection sourceCollection, + IsarCollection targetCollection, + String linkName, + Id? objectId, + ); +} + +/// Establishes a 1:1 relationship with the same or another collection. The +/// target collection is specified by the generic type argument. +abstract class IsarLink implements IsarLinkBase { + /// Create an empty, unattached link. 
Make sure to provide the correct + /// generic argument. + factory IsarLink() => IsarLinkImpl(); + + /// The linked object or `null` if no object is linked. + OBJ? get value; + + /// The linked object or `null` if no object is linked. + set value(OBJ? obj); +} + +/// Establishes a 1:n relationship with the same or another collection. The +/// target collection is specified by the generic type argument. +abstract class IsarLinks implements IsarLinkBase, Set { + /// Create an empty, unattached link. Make sure to provide the correct + /// generic argument. + factory IsarLinks() => IsarLinksImpl(); + + @override + Future load({bool overrideChanges = true}); + + @override + void loadSync({bool overrideChanges = true}); + + /// {@template links_update} + /// Creates and removes the specified links in the database. + /// + /// This operation does not alter the state of the local copy of this link + /// and it can even be used without loading the link. + /// {@endtemplate} + Future update({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }); + + /// {@macro links_update} + void updateSync({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }); + + /// Starts a query for linked objects. + QueryBuilder filter(); + + /// {@template links_count} + /// Counts the linked objects in the database. + /// + /// It does not take the local state into account and can even be used + /// without loading the link. + /// {@endtemplate} + Future count() => filter().count(); + + /// {@macro links_count} + int countSync() => filter().countSync(); +} diff --git a/lib/src/isar_reader.dart b/lib/src/isar_reader.dart new file mode 100644 index 0000000..1601ed9 --- /dev/null +++ b/lib/src/isar_reader.dart @@ -0,0 +1,88 @@ +// ignore_for_file: public_member_api_docs + +part of isar; + +/// @nodoc +@protected +abstract class IsarReader { + bool readBool(int offset); + + bool? 
readBoolOrNull(int offset); + + int readByte(int offset); + + int? readByteOrNull(int offset); + + int readInt(int offset); + + int? readIntOrNull(int offset); + + double readFloat(int offset); + + double? readFloatOrNull(int offset); + + int readLong(int offset); + + int? readLongOrNull(int offset); + + double readDouble(int offset); + + double? readDoubleOrNull(int offset); + + DateTime readDateTime(int offset); + + DateTime? readDateTimeOrNull(int offset); + + String readString(int offset); + + String? readStringOrNull(int offset); + + T? readObjectOrNull( + int offset, + Deserialize deserialize, + Map> allOffsets, + ); + + List? readBoolList(int offset); + + List? readBoolOrNullList(int offset); + + List? readByteList(int offset); + + List? readIntList(int offset); + + List? readIntOrNullList(int offset); + + List? readFloatList(int offset); + + List? readFloatOrNullList(int offset); + + List? readLongList(int offset); + + List? readLongOrNullList(int offset); + + List? readDoubleList(int offset); + + List? readDoubleOrNullList(int offset); + + List? readDateTimeList(int offset); + + List? readDateTimeOrNullList(int offset); + + List? readStringList(int offset); + + List? readStringOrNullList(int offset); + + List? readObjectList( + int offset, + Deserialize deserialize, + Map> allOffsets, + T defaultValue, + ); + + List? readObjectOrNullList( + int offset, + Deserialize deserialize, + Map> allOffsets, + ); +} diff --git a/lib/src/isar_writer.dart b/lib/src/isar_writer.dart new file mode 100644 index 0000000..518802a --- /dev/null +++ b/lib/src/isar_writer.dart @@ -0,0 +1,53 @@ +// ignore_for_file: public_member_api_docs + +part of isar; + +/// @nodoc +@protected +abstract class IsarWriter { + void writeBool(int offset, bool? value); + + void writeByte(int offset, int value); + + void writeInt(int offset, int? value); + + void writeFloat(int offset, double? value); + + void writeLong(int offset, int? value); + + void writeDouble(int offset, double? 
value); + + void writeDateTime(int offset, DateTime? value); + + void writeString(int offset, String? value); + + void writeObject( + int offset, + Map> allOffsets, + Serialize serialize, + T? value, + ); + + void writeByteList(int offset, List? values); + + void writeBoolList(int offset, List? values); + + void writeIntList(int offset, List? values); + + void writeFloatList(int offset, List? values); + + void writeLongList(int offset, List? values); + + void writeDoubleList(int offset, List? values); + + void writeDateTimeList(int offset, List? values); + + void writeStringList(int offset, List? values); + + void writeObjectList( + int offset, + Map> allOffsets, + Serialize serialize, + List? values, + ); +} diff --git a/lib/src/native/bindings.dart b/lib/src/native/bindings.dart new file mode 100644 index 0000000..734bc9d --- /dev/null +++ b/lib/src/native/bindings.dart @@ -0,0 +1,2241 @@ +// ignore_for_file: camel_case_types, non_constant_identifier_names + +// AUTO GENERATED FILE, DO NOT EDIT. +// +// Generated by `package:ffigen`. +import 'dart:ffi' as ffi; + +class IsarCoreBindings { + /// Holds the symbol lookup function. + final ffi.Pointer Function(String symbolName) + _lookup; + + /// The symbols are looked up in [dynamicLibrary]. + IsarCoreBindings(ffi.DynamicLibrary dynamicLibrary) + : _lookup = dynamicLibrary.lookup; + + /// The symbols are looked up with [lookup]. 
+ IsarCoreBindings.fromLookup( + ffi.Pointer Function(String symbolName) + lookup) + : _lookup = lookup; + + ffi.Pointer isar_find_word_boundaries( + ffi.Pointer input_bytes, + int length, + ffi.Pointer number_words, + ) { + return _isar_find_word_boundaries( + input_bytes, + length, + number_words, + ); + } + + late final _isar_find_word_boundariesPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function(ffi.Pointer, ffi.Uint32, + ffi.Pointer)>>('isar_find_word_boundaries'); + late final _isar_find_word_boundaries = + _isar_find_word_boundariesPtr.asFunction< + ffi.Pointer Function( + ffi.Pointer, int, ffi.Pointer)>(); + + void isar_free_word_boundaries( + ffi.Pointer boundaries, + int word_count, + ) { + return _isar_free_word_boundaries( + boundaries, + word_count, + ); + } + + late final _isar_free_word_boundariesPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, + ffi.Uint32)>>('isar_free_word_boundaries'); + late final _isar_free_word_boundaries = _isar_free_word_boundariesPtr + .asFunction, int)>(); + + void isar_free_string( + ffi.Pointer string, + ) { + return _isar_free_string( + string, + ); + } + + late final _isar_free_stringPtr = + _lookup)>>( + 'isar_free_string'); + late final _isar_free_string = + _isar_free_stringPtr.asFunction)>(); + + ffi.Pointer isar_get_error( + int err_code, + ) { + return _isar_get_error( + err_code, + ); + } + + late final _isar_get_errorPtr = + _lookup Function(ffi.Int64)>>( + 'isar_get_error'); + late final _isar_get_error = + _isar_get_errorPtr.asFunction Function(int)>(); + + void isar_free_c_object_set( + ffi.Pointer ros, + ) { + return _isar_free_c_object_set( + ros, + ); + } + + late final _isar_free_c_object_setPtr = + _lookup)>>( + 'isar_free_c_object_set'); + late final _isar_free_c_object_set = _isar_free_c_object_setPtr + .asFunction)>(); + + int isar_get( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer object, + ) { + return _isar_get( + collection, + txn, + object, + ); + } + + 
late final _isar_getPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer, ffi.Pointer)>>('isar_get'); + late final _isar_get = _isar_getPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer)>(); + + int isar_get_by_index( + ffi.Pointer collection, + ffi.Pointer txn, + int index_id, + ffi.Pointer key, + ffi.Pointer object, + ) { + return _isar_get_by_index( + collection, + txn, + index_id, + key, + object, + ); + } + + late final _isar_get_by_indexPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Pointer, + ffi.Pointer)>>('isar_get_by_index'); + late final _isar_get_by_index = _isar_get_by_indexPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + ffi.Pointer, ffi.Pointer)>(); + + int isar_get_all( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer objects, + ) { + return _isar_get_all( + collection, + txn, + objects, + ); + } + + late final _isar_get_allPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer, ffi.Pointer)>>('isar_get_all'); + late final _isar_get_all = _isar_get_allPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer)>(); + + int isar_get_all_by_index( + ffi.Pointer collection, + ffi.Pointer txn, + int index_id, + ffi.Pointer> keys, + ffi.Pointer objects, + ) { + return _isar_get_all_by_index( + collection, + txn, + index_id, + keys, + objects, + ); + } + + late final _isar_get_all_by_indexPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Pointer>, + ffi.Pointer)>>('isar_get_all_by_index'); + late final _isar_get_all_by_index = _isar_get_all_by_indexPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + ffi.Pointer>, ffi.Pointer)>(); + + int isar_put( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer object, + ) { + return _isar_put( + collection, + txn, + object, + ); + } + + late final _isar_putPtr = 
_lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer, ffi.Pointer)>>('isar_put'); + late final _isar_put = _isar_putPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer)>(); + + int isar_put_by_index( + ffi.Pointer collection, + ffi.Pointer txn, + int index_id, + ffi.Pointer object, + ) { + return _isar_put_by_index( + collection, + txn, + index_id, + object, + ); + } + + late final _isar_put_by_indexPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Pointer)>>('isar_put_by_index'); + late final _isar_put_by_index = _isar_put_by_indexPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + ffi.Pointer)>(); + + int isar_put_all( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer objects, + ) { + return _isar_put_all( + collection, + txn, + objects, + ); + } + + late final _isar_put_allPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer, ffi.Pointer)>>('isar_put_all'); + late final _isar_put_all = _isar_put_allPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer)>(); + + int isar_put_all_by_index( + ffi.Pointer collection, + ffi.Pointer txn, + int index_id, + ffi.Pointer objects, + ) { + return _isar_put_all_by_index( + collection, + txn, + index_id, + objects, + ); + } + + late final _isar_put_all_by_indexPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Pointer)>>('isar_put_all_by_index'); + late final _isar_put_all_by_index = _isar_put_all_by_indexPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + ffi.Pointer)>(); + + int isar_delete( + ffi.Pointer collection, + ffi.Pointer txn, + int id, + ffi.Pointer deleted, + ) { + return _isar_delete( + collection, + txn, + id, + deleted, + ); + } + + late final _isar_deletePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Int64, + 
ffi.Pointer)>>('isar_delete'); + late final _isar_delete = _isar_deletePtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + ffi.Pointer)>(); + + int isar_delete_by_index( + ffi.Pointer collection, + ffi.Pointer txn, + int index_id, + ffi.Pointer key, + ffi.Pointer deleted, + ) { + return _isar_delete_by_index( + collection, + txn, + index_id, + key, + deleted, + ); + } + + late final _isar_delete_by_indexPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Pointer, + ffi.Pointer)>>('isar_delete_by_index'); + late final _isar_delete_by_index = _isar_delete_by_indexPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + ffi.Pointer, ffi.Pointer)>(); + + int isar_delete_all( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer ids, + int ids_length, + ffi.Pointer count, + ) { + return _isar_delete_all( + collection, + txn, + ids, + ids_length, + count, + ); + } + + late final _isar_delete_allPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Uint32, + ffi.Pointer)>>('isar_delete_all'); + late final _isar_delete_all = _isar_delete_allPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer, int, ffi.Pointer)>(); + + int isar_delete_all_by_index( + ffi.Pointer collection, + ffi.Pointer txn, + int index_id, + ffi.Pointer> keys, + int keys_length, + ffi.Pointer count, + ) { + return _isar_delete_all_by_index( + collection, + txn, + index_id, + keys, + keys_length, + count, + ); + } + + late final _isar_delete_all_by_indexPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Pointer>, + ffi.Uint32, + ffi.Pointer)>>('isar_delete_all_by_index'); + late final _isar_delete_all_by_index = + _isar_delete_all_by_indexPtr.asFunction< + int Function( + ffi.Pointer, + ffi.Pointer, + int, + ffi.Pointer>, + int, + ffi.Pointer)>(); + + int isar_clear( + ffi.Pointer collection, + 
ffi.Pointer txn, + ) { + return _isar_clear( + collection, + txn, + ); + } + + late final _isar_clearPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer)>>('isar_clear'); + late final _isar_clear = _isar_clearPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer)>(); + + int isar_json_import( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer id_name, + ffi.Pointer json_bytes, + int json_length, + ) { + return _isar_json_import( + collection, + txn, + id_name, + json_bytes, + json_length, + ); + } + + late final _isar_json_importPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Uint32)>>('isar_json_import'); + late final _isar_json_import = _isar_json_importPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer, ffi.Pointer, int)>(); + + int isar_count( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer count, + ) { + return _isar_count( + collection, + txn, + count, + ); + } + + late final _isar_countPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer, ffi.Pointer)>>('isar_count'); + late final _isar_count = _isar_countPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer)>(); + + int isar_get_size( + ffi.Pointer collection, + ffi.Pointer txn, + bool include_indexes, + bool include_links, + ffi.Pointer size, + ) { + return _isar_get_size( + collection, + txn, + include_indexes, + include_links, + size, + ); + } + + late final _isar_get_sizePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Bool, + ffi.Bool, + ffi.Pointer)>>('isar_get_size'); + late final _isar_get_size = _isar_get_sizePtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, bool, + bool, ffi.Pointer)>(); + + int isar_verify( + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer objects, + ) { + return _isar_verify( + collection, + txn, + objects, + ); + } + + 
late final _isar_verifyPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer, ffi.Pointer)>>('isar_verify'); + late final _isar_verify = _isar_verifyPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer)>(); + + void isar_connect_dart_api( + DartPostCObjectFnType ptr, + ) { + return _isar_connect_dart_api( + ptr, + ); + } + + late final _isar_connect_dart_apiPtr = + _lookup>( + 'isar_connect_dart_api'); + late final _isar_connect_dart_api = _isar_connect_dart_apiPtr + .asFunction(); + + void isar_filter_static( + ffi.Pointer> filter, + bool value, + ) { + return _isar_filter_static( + filter, + value, + ); + } + + late final _isar_filter_staticPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer>, + ffi.Bool)>>('isar_filter_static'); + late final _isar_filter_static = _isar_filter_staticPtr + .asFunction>, bool)>(); + + void isar_filter_and_or_xor( + ffi.Pointer> filter, + bool and, + bool exclusive, + ffi.Pointer> conditions, + int length, + ) { + return _isar_filter_and_or_xor( + filter, + and, + exclusive, + conditions, + length, + ); + } + + late final _isar_filter_and_or_xorPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer>, + ffi.Bool, + ffi.Bool, + ffi.Pointer>, + ffi.Uint32)>>('isar_filter_and_or_xor'); + late final _isar_filter_and_or_xor = _isar_filter_and_or_xorPtr.asFunction< + void Function(ffi.Pointer>, bool, bool, + ffi.Pointer>, int)>(); + + void isar_filter_not( + ffi.Pointer> filter, + ffi.Pointer condition, + ) { + return _isar_filter_not( + filter, + condition, + ); + } + + late final _isar_filter_notPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer>, + ffi.Pointer)>>('isar_filter_not'); + late final _isar_filter_not = _isar_filter_notPtr.asFunction< + void Function(ffi.Pointer>, ffi.Pointer)>(); + + int isar_filter_object( + ffi.Pointer collection, + ffi.Pointer> filter, + ffi.Pointer condition, + int embedded_col_id, + int property_id, + ) 
{ + return _isar_filter_object( + collection, + filter, + condition, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_objectPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_object'); + late final _isar_filter_object = _isar_filter_objectPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, ffi.Pointer, int, int)>(); + + int isar_filter_link( + ffi.Pointer collection, + ffi.Pointer> filter, + ffi.Pointer condition, + int link_id, + ) { + return _isar_filter_link( + collection, + filter, + condition, + link_id, + ); + } + + late final _isar_filter_linkPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + ffi.Uint64)>>('isar_filter_link'); + late final _isar_filter_link = _isar_filter_linkPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, ffi.Pointer, int)>(); + + int isar_filter_link_length( + ffi.Pointer collection, + ffi.Pointer> filter, + int lower, + int upper, + int link_id, + ) { + return _isar_filter_link_length( + collection, + filter, + lower, + upper, + link_id, + ); + } + + late final _isar_filter_link_lengthPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Uint32, + ffi.Uint32, + ffi.Uint64)>>('isar_filter_link_length'); + late final _isar_filter_link_length = _isar_filter_link_lengthPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, int, int, int)>(); + + int isar_filter_list_length( + ffi.Pointer collection, + ffi.Pointer> filter, + int lower, + int upper, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_list_length( + collection, + filter, + lower, + upper, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_list_lengthPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Uint32, + ffi.Uint32, + ffi.Uint64, + 
ffi.Uint64)>>('isar_filter_list_length'); + late final _isar_filter_list_length = _isar_filter_list_lengthPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, int, int, int, int)>(); + + int isar_filter_null( + ffi.Pointer collection, + ffi.Pointer> filter, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_null( + collection, + filter, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_nullPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_null'); + late final _isar_filter_null = _isar_filter_nullPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, int, int)>(); + + void isar_filter_id( + ffi.Pointer> filter, + int lower, + bool include_lower, + int upper, + bool include_upper, + ) { + return _isar_filter_id( + filter, + lower, + include_lower, + upper, + include_upper, + ); + } + + late final _isar_filter_idPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer>, ffi.Int64, + ffi.Bool, ffi.Int64, ffi.Bool)>>('isar_filter_id'); + late final _isar_filter_id = _isar_filter_idPtr.asFunction< + void Function(ffi.Pointer>, int, bool, int, bool)>(); + + int isar_filter_long( + ffi.Pointer collection, + ffi.Pointer> filter, + int lower, + bool include_lower, + int upper, + bool include_upper, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_long( + collection, + filter, + lower, + include_lower, + upper, + include_upper, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_longPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Int64, + ffi.Bool, + ffi.Int64, + ffi.Bool, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_long'); + late final _isar_filter_long = _isar_filter_longPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, int, bool, int, bool, int, int)>(); + + int isar_filter_double( + ffi.Pointer collection, + ffi.Pointer> filter, + 
double lower, + double upper, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_double( + collection, + filter, + lower, + upper, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_doublePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Double, + ffi.Double, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_double'); + late final _isar_filter_double = _isar_filter_doublePtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, double, double, int, int)>(); + + int isar_filter_string( + ffi.Pointer collection, + ffi.Pointer> filter, + ffi.Pointer lower, + bool include_lower, + ffi.Pointer upper, + bool include_upper, + bool case_sensitive, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_string( + collection, + filter, + lower, + include_lower, + upper, + include_upper, + case_sensitive, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_stringPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + ffi.Bool, + ffi.Pointer, + ffi.Bool, + ffi.Bool, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_string'); + late final _isar_filter_string = _isar_filter_stringPtr.asFunction< + int Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + bool, + ffi.Pointer, + bool, + bool, + int, + int)>(); + + int isar_filter_string_starts_with( + ffi.Pointer collection, + ffi.Pointer> filter, + ffi.Pointer value, + bool case_sensitive, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_string_starts_with( + collection, + filter, + value, + case_sensitive, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_string_starts_withPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + ffi.Bool, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_string_starts_with'); + late final _isar_filter_string_starts_with = + 
_isar_filter_string_starts_withPtr.asFunction< + int Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + bool, + int, + int)>(); + + int isar_filter_string_ends_with( + ffi.Pointer collection, + ffi.Pointer> filter, + ffi.Pointer value, + bool case_sensitive, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_string_ends_with( + collection, + filter, + value, + case_sensitive, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_string_ends_withPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + ffi.Bool, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_string_ends_with'); + late final _isar_filter_string_ends_with = + _isar_filter_string_ends_withPtr.asFunction< + int Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + bool, + int, + int)>(); + + int isar_filter_string_contains( + ffi.Pointer collection, + ffi.Pointer> filter, + ffi.Pointer value, + bool case_sensitive, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_string_contains( + collection, + filter, + value, + case_sensitive, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_string_containsPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + ffi.Bool, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_string_contains'); + late final _isar_filter_string_contains = + _isar_filter_string_containsPtr.asFunction< + int Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + bool, + int, + int)>(); + + int isar_filter_string_matches( + ffi.Pointer collection, + ffi.Pointer> filter, + ffi.Pointer value, + bool case_sensitive, + int embedded_col_id, + int property_id, + ) { + return _isar_filter_string_matches( + collection, + filter, + value, + case_sensitive, + embedded_col_id, + property_id, + ); + } + + late final _isar_filter_string_matchesPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + 
ffi.Pointer>, + ffi.Pointer, + ffi.Bool, + ffi.Uint64, + ffi.Uint64)>>('isar_filter_string_matches'); + late final _isar_filter_string_matches = + _isar_filter_string_matchesPtr.asFunction< + int Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer, + bool, + int, + int)>(); + + void isar_key_create( + ffi.Pointer> key, + ) { + return _isar_key_create( + key, + ); + } + + late final _isar_key_createPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer>)>>('isar_key_create'); + late final _isar_key_create = _isar_key_createPtr + .asFunction>)>(); + + bool isar_key_increase( + ffi.Pointer key, + ) { + return _isar_key_increase( + key, + ); + } + + late final _isar_key_increasePtr = + _lookup)>>( + 'isar_key_increase'); + late final _isar_key_increase = + _isar_key_increasePtr.asFunction)>(); + + bool isar_key_decrease( + ffi.Pointer key, + ) { + return _isar_key_decrease( + key, + ); + } + + late final _isar_key_decreasePtr = + _lookup)>>( + 'isar_key_decrease'); + late final _isar_key_decrease = + _isar_key_decreasePtr.asFunction)>(); + + void isar_key_add_byte( + ffi.Pointer key, + int value, + ) { + return _isar_key_add_byte( + key, + value, + ); + } + + late final _isar_key_add_bytePtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer, ffi.Uint8)>>('isar_key_add_byte'); + late final _isar_key_add_byte = _isar_key_add_bytePtr + .asFunction, int)>(); + + void isar_key_add_int( + ffi.Pointer key, + int value, + ) { + return _isar_key_add_int( + key, + value, + ); + } + + late final _isar_key_add_intPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer, ffi.Int32)>>('isar_key_add_int'); + late final _isar_key_add_int = _isar_key_add_intPtr + .asFunction, int)>(); + + void isar_key_add_long( + ffi.Pointer key, + int value, + ) { + return _isar_key_add_long( + key, + value, + ); + } + + late final _isar_key_add_longPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer, 
ffi.Int64)>>('isar_key_add_long'); + late final _isar_key_add_long = _isar_key_add_longPtr + .asFunction, int)>(); + + void isar_key_add_float( + ffi.Pointer key, + double value, + ) { + return _isar_key_add_float( + key, + value, + ); + } + + late final _isar_key_add_floatPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer, ffi.Double)>>('isar_key_add_float'); + late final _isar_key_add_float = _isar_key_add_floatPtr + .asFunction, double)>(); + + void isar_key_add_double( + ffi.Pointer key, + double value, + ) { + return _isar_key_add_double( + key, + value, + ); + } + + late final _isar_key_add_doublePtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer, ffi.Double)>>('isar_key_add_double'); + late final _isar_key_add_double = _isar_key_add_doublePtr + .asFunction, double)>(); + + void isar_key_add_string( + ffi.Pointer key, + ffi.Pointer value, + bool case_sensitive, + ) { + return _isar_key_add_string( + key, + value, + case_sensitive, + ); + } + + late final _isar_key_add_stringPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, ffi.Pointer, + ffi.Bool)>>('isar_key_add_string'); + late final _isar_key_add_string = _isar_key_add_stringPtr.asFunction< + void Function(ffi.Pointer, ffi.Pointer, bool)>(); + + void isar_key_add_string_hash( + ffi.Pointer key, + ffi.Pointer value, + bool case_sensitive, + ) { + return _isar_key_add_string_hash( + key, + value, + case_sensitive, + ); + } + + late final _isar_key_add_string_hashPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, ffi.Pointer, + ffi.Bool)>>('isar_key_add_string_hash'); + late final _isar_key_add_string_hash = + _isar_key_add_string_hashPtr.asFunction< + void Function(ffi.Pointer, ffi.Pointer, bool)>(); + + void isar_key_add_string_list_hash( + ffi.Pointer key, + ffi.Pointer> value, + int length, + bool case_sensitive, + ) { + return _isar_key_add_string_list_hash( + key, + value, + length, + case_sensitive, + ); + } + + late final 
_isar_key_add_string_list_hashPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Uint32, + ffi.Bool)>>('isar_key_add_string_list_hash'); + late final _isar_key_add_string_list_hash = + _isar_key_add_string_list_hashPtr.asFunction< + void Function(ffi.Pointer, + ffi.Pointer>, int, bool)>(); + + void isar_key_add_byte_list_hash( + ffi.Pointer key, + ffi.Pointer value, + int length, + ) { + return _isar_key_add_byte_list_hash( + key, + value, + length, + ); + } + + late final _isar_key_add_byte_list_hashPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, ffi.Pointer, + ffi.Uint32)>>('isar_key_add_byte_list_hash'); + late final _isar_key_add_byte_list_hash = + _isar_key_add_byte_list_hashPtr.asFunction< + void Function(ffi.Pointer, ffi.Pointer, int)>(); + + void isar_key_add_int_list_hash( + ffi.Pointer key, + ffi.Pointer value, + int length, + ) { + return _isar_key_add_int_list_hash( + key, + value, + length, + ); + } + + late final _isar_key_add_int_list_hashPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, ffi.Pointer, + ffi.Uint32)>>('isar_key_add_int_list_hash'); + late final _isar_key_add_int_list_hash = + _isar_key_add_int_list_hashPtr.asFunction< + void Function(ffi.Pointer, ffi.Pointer, int)>(); + + void isar_key_add_long_list_hash( + ffi.Pointer key, + ffi.Pointer value, + int length, + ) { + return _isar_key_add_long_list_hash( + key, + value, + length, + ); + } + + late final _isar_key_add_long_list_hashPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, ffi.Pointer, + ffi.Uint32)>>('isar_key_add_long_list_hash'); + late final _isar_key_add_long_list_hash = + _isar_key_add_long_list_hashPtr.asFunction< + void Function(ffi.Pointer, ffi.Pointer, int)>(); + + ffi.Pointer isar_version() { + return _isar_version(); + } + + late final _isar_versionPtr = + _lookup Function()>>( + 'isar_version'); + late final _isar_version = + _isar_versionPtr.asFunction 
Function()>(); + + int isar_instance_create( + ffi.Pointer> isar, + ffi.Pointer name, + ffi.Pointer path, + ffi.Pointer schema_json, + int max_size_mib, + bool relaxed_durability, + int compact_min_file_size, + int compact_min_bytes, + double compact_min_ratio, + ) { + return _isar_instance_create( + isar, + name, + path, + schema_json, + max_size_mib, + relaxed_durability, + compact_min_file_size, + compact_min_bytes, + compact_min_ratio, + ); + } + + late final _isar_instance_createPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer>, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Int64, + ffi.Bool, + ffi.Uint32, + ffi.Uint32, + ffi.Double)>>('isar_instance_create'); + late final _isar_instance_create = _isar_instance_createPtr.asFunction< + int Function( + ffi.Pointer>, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + int, + bool, + int, + int, + double)>(); + + void isar_instance_create_async( + ffi.Pointer> isar, + ffi.Pointer name, + ffi.Pointer path, + ffi.Pointer schema_json, + int max_size_mib, + bool relaxed_durability, + int compact_min_file_size, + int compact_min_bytes, + double compact_min_ratio, + int port, + ) { + return _isar_instance_create_async( + isar, + name, + path, + schema_json, + max_size_mib, + relaxed_durability, + compact_min_file_size, + compact_min_bytes, + compact_min_ratio, + port, + ); + } + + late final _isar_instance_create_asyncPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer>, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Int64, + ffi.Bool, + ffi.Uint32, + ffi.Uint32, + ffi.Double, + DartPort)>>('isar_instance_create_async'); + late final _isar_instance_create_async = + _isar_instance_create_asyncPtr.asFunction< + void Function( + ffi.Pointer>, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + int, + bool, + int, + int, + double, + int)>(); + + bool isar_instance_close( + ffi.Pointer isar, + ) { + return _isar_instance_close( + isar, + ); + } + + late final _isar_instance_closePtr 
= _lookup< + ffi.NativeFunction)>>( + 'isar_instance_close'); + late final _isar_instance_close = _isar_instance_closePtr + .asFunction)>(); + + bool isar_instance_close_and_delete( + ffi.Pointer isar, + ) { + return _isar_instance_close_and_delete( + isar, + ); + } + + late final _isar_instance_close_and_deletePtr = _lookup< + ffi.NativeFunction)>>( + 'isar_instance_close_and_delete'); + late final _isar_instance_close_and_delete = + _isar_instance_close_and_deletePtr + .asFunction)>(); + + ffi.Pointer isar_instance_get_path( + ffi.Pointer isar, + ) { + return _isar_instance_get_path( + isar, + ); + } + + late final _isar_instance_get_pathPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + ffi.Pointer)>>('isar_instance_get_path'); + late final _isar_instance_get_path = _isar_instance_get_pathPtr + .asFunction Function(ffi.Pointer)>(); + + int isar_instance_get_collection( + ffi.Pointer isar, + ffi.Pointer> collection, + int collection_id, + ) { + return _isar_instance_get_collection( + isar, + collection, + collection_id, + ); + } + + late final _isar_instance_get_collectionPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Uint64)>>('isar_instance_get_collection'); + late final _isar_instance_get_collection = + _isar_instance_get_collectionPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, int)>(); + + int isar_instance_get_size( + ffi.Pointer instance, + ffi.Pointer txn, + bool include_indexes, + bool include_links, + ffi.Pointer size, + ) { + return _isar_instance_get_size( + instance, + txn, + include_indexes, + include_links, + size, + ); + } + + late final _isar_instance_get_sizePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Bool, + ffi.Bool, + ffi.Pointer)>>('isar_instance_get_size'); + late final _isar_instance_get_size = _isar_instance_get_sizePtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, bool, + bool, ffi.Pointer)>(); + + void 
isar_instance_copy_to_file( + ffi.Pointer instance, + ffi.Pointer path, + int port, + ) { + return _isar_instance_copy_to_file( + instance, + path, + port, + ); + } + + late final _isar_instance_copy_to_filePtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, ffi.Pointer, + DartPort)>>('isar_instance_copy_to_file'); + late final _isar_instance_copy_to_file = + _isar_instance_copy_to_filePtr.asFunction< + void Function( + ffi.Pointer, ffi.Pointer, int)>(); + + int isar_instance_verify( + ffi.Pointer instance, + ffi.Pointer txn, + ) { + return _isar_instance_verify( + instance, + txn, + ); + } + + late final _isar_instance_verifyPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, + ffi.Pointer)>>('isar_instance_verify'); + late final _isar_instance_verify = _isar_instance_verifyPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer)>(); + + int isar_get_offsets( + ffi.Pointer collection, + int embedded_col_id, + ffi.Pointer offsets, + ) { + return _isar_get_offsets( + collection, + embedded_col_id, + offsets, + ); + } + + late final _isar_get_offsetsPtr = _lookup< + ffi.NativeFunction< + ffi.Uint32 Function(ffi.Pointer, ffi.Uint64, + ffi.Pointer)>>('isar_get_offsets'); + late final _isar_get_offsets = _isar_get_offsetsPtr.asFunction< + int Function( + ffi.Pointer, int, ffi.Pointer)>(); + + int isar_link( + ffi.Pointer collection, + ffi.Pointer txn, + int link_id, + int id, + int target_id, + ) { + return _isar_link( + collection, + txn, + link_id, + id, + target_id, + ); + } + + late final _isar_linkPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Int64, + ffi.Int64)>>('isar_link'); + late final _isar_link = _isar_linkPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + int, int)>(); + + int isar_link_unlink( + ffi.Pointer collection, + ffi.Pointer txn, + int link_id, + int id, + int target_id, + ) { + return _isar_link_unlink( + collection, + txn, + link_id, + 
id, + target_id, + ); + } + + late final _isar_link_unlinkPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Int64, + ffi.Int64)>>('isar_link_unlink'); + late final _isar_link_unlink = _isar_link_unlinkPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + int, int)>(); + + int isar_link_unlink_all( + ffi.Pointer collection, + ffi.Pointer txn, + int link_id, + int id, + ) { + return _isar_link_unlink_all( + collection, + txn, + link_id, + id, + ); + } + + late final _isar_link_unlink_allPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Int64)>>('isar_link_unlink_all'); + late final _isar_link_unlink_all = _isar_link_unlink_allPtr.asFunction< + int Function( + ffi.Pointer, ffi.Pointer, int, int)>(); + + int isar_link_update_all( + ffi.Pointer collection, + ffi.Pointer txn, + int link_id, + int id, + ffi.Pointer ids, + int link_count, + int unlink_count, + bool replace, + ) { + return _isar_link_update_all( + collection, + txn, + link_id, + id, + ids, + link_count, + unlink_count, + replace, + ); + } + + late final _isar_link_update_allPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Int64, + ffi.Pointer, + ffi.Uint32, + ffi.Uint32, + ffi.Bool)>>('isar_link_update_all'); + late final _isar_link_update_all = _isar_link_update_allPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + int, ffi.Pointer, int, int, bool)>(); + + int isar_link_verify( + ffi.Pointer collection, + ffi.Pointer txn, + int link_id, + ffi.Pointer ids, + int ids_count, + ) { + return _isar_link_verify( + collection, + txn, + link_id, + ids, + ids_count, + ); + } + + late final _isar_link_verifyPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + ffi.Pointer, + ffi.Uint32)>>('isar_link_verify'); + late final _isar_link_verify = 
_isar_link_verifyPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, int, + ffi.Pointer, int)>(); + + ffi.Pointer isar_qb_create( + ffi.Pointer collection, + ) { + return _isar_qb_create( + collection, + ); + } + + late final _isar_qb_createPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + ffi.Pointer)>>('isar_qb_create'); + late final _isar_qb_create = _isar_qb_createPtr.asFunction< + ffi.Pointer Function(ffi.Pointer)>(); + + int isar_qb_add_id_where_clause( + ffi.Pointer builder, + int start_id, + int end_id, + ) { + return _isar_qb_add_id_where_clause( + builder, + start_id, + end_id, + ); + } + + late final _isar_qb_add_id_where_clausePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, ffi.Int64, + ffi.Int64)>>('isar_qb_add_id_where_clause'); + late final _isar_qb_add_id_where_clause = _isar_qb_add_id_where_clausePtr + .asFunction, int, int)>(); + + int isar_qb_add_index_where_clause( + ffi.Pointer builder, + int index_id, + ffi.Pointer lower_key, + ffi.Pointer upper_key, + bool sort_asc, + bool skip_duplicates, + ) { + return _isar_qb_add_index_where_clause( + builder, + index_id, + lower_key, + upper_key, + sort_asc, + skip_duplicates, + ); + } + + late final _isar_qb_add_index_where_clausePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Uint64, + ffi.Pointer, + ffi.Pointer, + ffi.Bool, + ffi.Bool)>>('isar_qb_add_index_where_clause'); + late final _isar_qb_add_index_where_clause = + _isar_qb_add_index_where_clausePtr.asFunction< + int Function(ffi.Pointer, int, ffi.Pointer, + ffi.Pointer, bool, bool)>(); + + int isar_qb_add_link_where_clause( + ffi.Pointer builder, + ffi.Pointer source_collection, + int link_id, + int id, + ) { + return _isar_qb_add_link_where_clause( + builder, + source_collection, + link_id, + id, + ); + } + + late final _isar_qb_add_link_where_clausePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Uint64, + 
ffi.Int64)>>('isar_qb_add_link_where_clause'); + late final _isar_qb_add_link_where_clause = + _isar_qb_add_link_where_clausePtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + int, int)>(); + + void isar_qb_set_filter( + ffi.Pointer builder, + ffi.Pointer filter, + ) { + return _isar_qb_set_filter( + builder, + filter, + ); + } + + late final _isar_qb_set_filterPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, + ffi.Pointer)>>('isar_qb_set_filter'); + late final _isar_qb_set_filter = _isar_qb_set_filterPtr.asFunction< + void Function(ffi.Pointer, ffi.Pointer)>(); + + int isar_qb_add_sort_by( + ffi.Pointer builder, + int property_id, + bool asc, + ) { + return _isar_qb_add_sort_by( + builder, + property_id, + asc, + ); + } + + late final _isar_qb_add_sort_byPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, ffi.Uint64, + ffi.Bool)>>('isar_qb_add_sort_by'); + late final _isar_qb_add_sort_by = _isar_qb_add_sort_byPtr + .asFunction, int, bool)>(); + + int isar_qb_add_distinct_by( + ffi.Pointer builder, + int property_id, + bool case_sensitive, + ) { + return _isar_qb_add_distinct_by( + builder, + property_id, + case_sensitive, + ); + } + + late final _isar_qb_add_distinct_byPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, ffi.Uint64, + ffi.Bool)>>('isar_qb_add_distinct_by'); + late final _isar_qb_add_distinct_by = _isar_qb_add_distinct_byPtr + .asFunction, int, bool)>(); + + void isar_qb_set_offset_limit( + ffi.Pointer builder, + int offset, + int limit, + ) { + return _isar_qb_set_offset_limit( + builder, + offset, + limit, + ); + } + + late final _isar_qb_set_offset_limitPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer, ffi.Int64, + ffi.Int64)>>('isar_qb_set_offset_limit'); + late final _isar_qb_set_offset_limit = _isar_qb_set_offset_limitPtr + .asFunction, int, int)>(); + + ffi.Pointer isar_qb_build( + ffi.Pointer builder, + ) { + return _isar_qb_build( + builder, + ); + } + + 
late final _isar_qb_buildPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + ffi.Pointer)>>('isar_qb_build'); + late final _isar_qb_build = _isar_qb_buildPtr + .asFunction Function(ffi.Pointer)>(); + + void isar_q_free( + ffi.Pointer query, + ) { + return _isar_q_free( + query, + ); + } + + late final _isar_q_freePtr = + _lookup)>>( + 'isar_q_free'); + late final _isar_q_free = + _isar_q_freePtr.asFunction)>(); + + int isar_q_find( + ffi.Pointer query, + ffi.Pointer txn, + ffi.Pointer result, + int limit, + ) { + return _isar_q_find( + query, + txn, + result, + limit, + ); + } + + late final _isar_q_findPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer, ffi.Uint32)>>('isar_q_find'); + late final _isar_q_find = _isar_q_findPtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer, int)>(); + + int isar_q_delete( + ffi.Pointer query, + ffi.Pointer collection, + ffi.Pointer txn, + int limit, + ffi.Pointer count, + ) { + return _isar_q_delete( + query, + collection, + txn, + limit, + count, + ); + } + + late final _isar_q_deletePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Uint32, + ffi.Pointer)>>('isar_q_delete'); + late final _isar_q_delete = _isar_q_deletePtr.asFunction< + int Function(ffi.Pointer, ffi.Pointer, + ffi.Pointer, int, ffi.Pointer)>(); + + int isar_q_export_json( + ffi.Pointer query, + ffi.Pointer collection, + ffi.Pointer txn, + ffi.Pointer id_name, + ffi.Pointer> json_bytes, + ffi.Pointer json_length, + ) { + return _isar_q_export_json( + query, + collection, + txn, + id_name, + json_bytes, + json_length, + ); + } + + late final _isar_q_export_jsonPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer)>>('isar_q_export_json'); + late final _isar_q_export_json = _isar_q_export_jsonPtr.asFunction< + int Function( + ffi.Pointer, 
+ ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Pointer>, + ffi.Pointer)>(); + + void isar_free_json( + ffi.Pointer json_bytes, + int json_length, + ) { + return _isar_free_json( + json_bytes, + json_length, + ); + } + + late final _isar_free_jsonPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function( + ffi.Pointer, ffi.Uint32)>>('isar_free_json'); + late final _isar_free_json = _isar_free_jsonPtr + .asFunction, int)>(); + + int isar_q_aggregate( + ffi.Pointer collection, + ffi.Pointer query, + ffi.Pointer txn, + int operation, + int property_id, + ffi.Pointer> result, + ) { + return _isar_q_aggregate( + collection, + query, + txn, + operation, + property_id, + result, + ); + } + + late final _isar_q_aggregatePtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + ffi.Uint8, + ffi.Uint64, + ffi.Pointer>)>>( + 'isar_q_aggregate'); + late final _isar_q_aggregate = _isar_q_aggregatePtr.asFunction< + int Function( + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, + int, + int, + ffi.Pointer>)>(); + + int isar_q_aggregate_long_result( + ffi.Pointer result, + ) { + return _isar_q_aggregate_long_result( + result, + ); + } + + late final _isar_q_aggregate_long_resultPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function(ffi.Pointer)>>( + 'isar_q_aggregate_long_result'); + late final _isar_q_aggregate_long_result = _isar_q_aggregate_long_resultPtr + .asFunction)>(); + + double isar_q_aggregate_double_result( + ffi.Pointer result, + ) { + return _isar_q_aggregate_double_result( + result, + ); + } + + late final _isar_q_aggregate_double_resultPtr = _lookup< + ffi.NativeFunction< + ffi.Double Function(ffi.Pointer)>>( + 'isar_q_aggregate_double_result'); + late final _isar_q_aggregate_double_result = + _isar_q_aggregate_double_resultPtr + .asFunction)>(); + + int isar_txn_begin( + ffi.Pointer isar, + ffi.Pointer> txn, + bool sync1, + bool write, + bool silent, + int port, + ) { + return _isar_txn_begin( + isar, + txn, + 
sync1, + write, + silent, + port, + ); + } + + late final _isar_txn_beginPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, + ffi.Pointer>, + ffi.Bool, + ffi.Bool, + ffi.Bool, + DartPort)>>('isar_txn_begin'); + late final _isar_txn_begin = _isar_txn_beginPtr.asFunction< + int Function(ffi.Pointer, + ffi.Pointer>, bool, bool, bool, int)>(); + + int isar_txn_finish( + ffi.Pointer txn, + bool commit, + ) { + return _isar_txn_finish( + txn, + commit, + ); + } + + late final _isar_txn_finishPtr = _lookup< + ffi.NativeFunction< + ffi.Int64 Function( + ffi.Pointer, ffi.Bool)>>('isar_txn_finish'); + late final _isar_txn_finish = _isar_txn_finishPtr + .asFunction, bool)>(); + + ffi.Pointer isar_watch_collection( + ffi.Pointer isar, + ffi.Pointer collection, + int port, + ) { + return _isar_watch_collection( + isar, + collection, + port, + ); + } + + late final _isar_watch_collectionPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + ffi.Pointer, + ffi.Pointer, + DartPort)>>('isar_watch_collection'); + late final _isar_watch_collection = _isar_watch_collectionPtr.asFunction< + ffi.Pointer Function( + ffi.Pointer, ffi.Pointer, int)>(); + + ffi.Pointer isar_watch_object( + ffi.Pointer isar, + ffi.Pointer collection, + int id, + int port, + ) { + return _isar_watch_object( + isar, + collection, + id, + port, + ); + } + + late final _isar_watch_objectPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + ffi.Pointer, + ffi.Pointer, + ffi.Int64, + DartPort)>>('isar_watch_object'); + late final _isar_watch_object = _isar_watch_objectPtr.asFunction< + ffi.Pointer Function(ffi.Pointer, + ffi.Pointer, int, int)>(); + + ffi.Pointer isar_watch_query( + ffi.Pointer isar, + ffi.Pointer collection, + ffi.Pointer query, + int port, + ) { + return _isar_watch_query( + isar, + collection, + query, + port, + ); + } + + late final _isar_watch_queryPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + ffi.Pointer, + ffi.Pointer, + ffi.Pointer, 
+ DartPort)>>('isar_watch_query'); + late final _isar_watch_query = _isar_watch_queryPtr.asFunction< + ffi.Pointer Function(ffi.Pointer, + ffi.Pointer, ffi.Pointer, int)>(); + + void isar_stop_watching( + ffi.Pointer handle, + ) { + return _isar_stop_watching( + handle, + ); + } + + late final _isar_stop_watchingPtr = + _lookup)>>( + 'isar_stop_watching'); + late final _isar_stop_watching = _isar_stop_watchingPtr + .asFunction)>(); +} + +class CObject extends ffi.Struct { + @ffi.Int64() + external int id; + + external ffi.Pointer buffer; + + @ffi.Uint32() + external int buffer_length; +} + +class CObjectSet extends ffi.Struct { + external ffi.Pointer objects; + + @ffi.Uint32() + external int length; +} + +class CIsarCollection extends ffi.Opaque {} + +class CIsarTxn extends ffi.Opaque {} + +class CIndexKey extends ffi.Opaque {} + +typedef DartPostCObjectFnType = ffi.Pointer< + ffi.NativeFunction)>>; +typedef DartPort = ffi.Int64; + +class CDartCObject extends ffi.Opaque {} + +class CFilter extends ffi.Opaque {} + +class CIsarInstance extends ffi.Opaque {} + +class CQueryBuilder extends ffi.Opaque {} + +class CQuery extends ffi.Opaque {} + +class CAggregationResult extends ffi.Opaque {} + +class CWatchHandle extends ffi.Opaque {} + +const int IsarIndex_MAX_STRING_INDEX_SIZE = 1024; + +const int IsarObject_NULL_BYTE = 0; + +const int IsarObject_NULL_BOOL = 0; + +const int IsarObject_FALSE_BOOL = 1; + +const int IsarObject_TRUE_BOOL = 2; + +const int IsarObject_NULL_INT = -2147483648; + +const int IsarObject_NULL_LONG = -9223372036854775808; + +const int IsarObject_MAX_SIZE = 33554432; + +const int SchemaManager_ISAR_FILE_VERSION = 2; diff --git a/lib/src/native/encode_string.dart b/lib/src/native/encode_string.dart new file mode 100644 index 0000000..a21aa54 --- /dev/null +++ b/lib/src/native/encode_string.dart @@ -0,0 +1,54 @@ +import 'dart:ffi'; +import 'dart:typed_data'; + +const int _oneByteLimit = 0x7f; // 7 bits +const int _twoByteLimit = 0x7ff; // 11 bits 
+const int _surrogateTagMask = 0xFC00; +const int _surrogateValueMask = 0x3FF; +const int _leadSurrogateMin = 0xD800; + +/// Encodes a Dart String to UTF8, writes it at [offset] into [buffer] and +/// returns the number of written bytes. +/// +/// The buffer needs to have a capacity of at least `offset + str.length * 3`. +int encodeString(String str, Uint8List buffer, int offset) { + final startOffset = offset; + for (var stringIndex = 0; stringIndex < str.length; stringIndex++) { + final codeUnit = str.codeUnitAt(stringIndex); + // ASCII has the same representation in UTF-8 and UTF-16. + if (codeUnit <= _oneByteLimit) { + buffer[offset++] = codeUnit; + } else if ((codeUnit & _surrogateTagMask) == _leadSurrogateMin) { + // combine surrogate pair + final nextCodeUnit = str.codeUnitAt(++stringIndex); + final rune = 0x10000 + ((codeUnit & _surrogateValueMask) << 10) | + (nextCodeUnit & _surrogateValueMask); + // If the rune is encoded with 2 code-units then it must be encoded + // with 4 bytes in UTF-8. 
+ buffer[offset++] = 0xF0 | (rune >> 18); + buffer[offset++] = 0x80 | ((rune >> 12) & 0x3f); + buffer[offset++] = 0x80 | ((rune >> 6) & 0x3f); + buffer[offset++] = 0x80 | (rune & 0x3f); + } else if (codeUnit <= _twoByteLimit) { + buffer[offset++] = 0xC0 | (codeUnit >> 6); + buffer[offset++] = 0x80 | (codeUnit & 0x3f); + } else { + buffer[offset++] = 0xE0 | (codeUnit >> 12); + buffer[offset++] = 0x80 | ((codeUnit >> 6) & 0x3f); + buffer[offset++] = 0x80 | (codeUnit & 0x3f); + } + } + return offset - startOffset; +} + +/// @nodoc +extension CString on String { + /// Create a zero terminated C-String from a Dart String + Pointer toCString(Allocator alloc) { + final bufferPtr = alloc(length * 3 + 1); + final buffer = bufferPtr.asTypedList(length * 3 + 1); + final size = encodeString(this, buffer, 0); + buffer[size] = 0; + return bufferPtr.cast(); + } +} diff --git a/lib/src/native/index_key.dart b/lib/src/native/index_key.dart new file mode 100644 index 0000000..0422578 --- /dev/null +++ b/lib/src/native/index_key.dart @@ -0,0 +1,257 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:ffi'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/isar.dart'; +import 'package:isar/src/native/bindings.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/isar_writer_impl.dart'; + +final _keyPtrPtr = malloc>(); + +Pointer buildIndexKey( + CollectionSchema schema, + IndexSchema index, + IndexKey key, +) { + if (key.length > index.properties.length) { + throw IsarError('Invalid number of values for index ${index.name}.'); + } + + IC.isar_key_create(_keyPtrPtr); + final keyPtr = _keyPtrPtr.value; + + for (var i = 0; i < key.length; i++) { + final indexProperty = index.properties[i]; + _addKeyValue( + keyPtr, + key[i], + schema.property(indexProperty.name), + indexProperty.type, + indexProperty.caseSensitive, + ); + } + + return keyPtr; +} + +Pointer 
buildLowerUnboundedIndexKey() { + IC.isar_key_create(_keyPtrPtr); + return _keyPtrPtr.value; +} + +Pointer buildUpperUnboundedIndexKey() { + IC.isar_key_create(_keyPtrPtr); + final keyPtr = _keyPtrPtr.value; + IC.isar_key_add_long(keyPtr, maxLong); + + return keyPtr; +} + +void _addKeyValue( + Pointer keyPtr, + Object? value, + PropertySchema property, + IndexType type, + bool caseSensitive, +) { + if (property.enumMap != null) { + if (value is Enum) { + value = property.enumMap![value.name]; + } else if (value is List) { + value = value.map((e) { + if (e is Enum) { + return property.enumMap![e.name]; + } else { + return e; + } + }).toList(); + } + } + + final isarType = + type != IndexType.hash ? property.type.scalarType : property.type; + switch (isarType) { + case IsarType.bool: + IC.isar_key_add_byte(keyPtr, (value as bool?).byteValue); + break; + case IsarType.byte: + IC.isar_key_add_byte(keyPtr, (value ?? 0) as int); + break; + case IsarType.int: + IC.isar_key_add_int(keyPtr, (value as int?) ?? nullInt); + break; + case IsarType.float: + IC.isar_key_add_float(keyPtr, (value as double?) ?? nullFloat); + break; + case IsarType.long: + IC.isar_key_add_long(keyPtr, (value as int?) ?? nullLong); + break; + case IsarType.double: + IC.isar_key_add_double(keyPtr, (value as double?) ?? 
nullDouble); + break; + case IsarType.dateTime: + IC.isar_key_add_long(keyPtr, (value as DateTime?).longValue); + break; + case IsarType.string: + final strPtr = _strToNative(value as String?); + if (type == IndexType.value) { + IC.isar_key_add_string(keyPtr, strPtr, caseSensitive); + } else { + IC.isar_key_add_string_hash(keyPtr, strPtr, caseSensitive); + } + _freeStr(strPtr); + break; + case IsarType.boolList: + if (value == null) { + IC.isar_key_add_byte_list_hash(keyPtr, nullptr, 0); + } else { + value as List; + final boolListPtr = malloc(value.length); + boolListPtr + .asTypedList(value.length) + .setAll(0, value.map((e) => e.byteValue)); + IC.isar_key_add_byte_list_hash(keyPtr, boolListPtr, value.length); + malloc.free(boolListPtr); + } + break; + case IsarType.byteList: + if (value == null) { + IC.isar_key_add_byte_list_hash(keyPtr, nullptr, 0); + } else { + value as List; + final bytesPtr = malloc(value.length); + bytesPtr.asTypedList(value.length).setAll(0, value); + IC.isar_key_add_byte_list_hash(keyPtr, bytesPtr, value.length); + malloc.free(bytesPtr); + } + break; + case IsarType.intList: + if (value == null) { + IC.isar_key_add_int_list_hash(keyPtr, nullptr, 0); + } else { + value as List; + final intListPtr = malloc(value.length); + intListPtr + .asTypedList(value.length) + .setAll(0, value.map((e) => e ?? nullInt)); + IC.isar_key_add_int_list_hash(keyPtr, intListPtr, value.length); + malloc.free(intListPtr); + } + break; + case IsarType.longList: + if (value == null) { + IC.isar_key_add_long_list_hash(keyPtr, nullptr, 0); + } else { + value as List; + final longListPtr = malloc(value.length); + longListPtr + .asTypedList(value.length) + .setAll(0, value.map((e) => e ?? 
nullLong)); + IC.isar_key_add_long_list_hash(keyPtr, longListPtr, value.length); + malloc.free(longListPtr); + } + break; + case IsarType.dateTimeList: + if (value == null) { + IC.isar_key_add_long_list_hash(keyPtr, nullptr, 0); + } else { + value as List; + final longListPtr = malloc(value.length); + for (var i = 0; i < value.length; i++) { + longListPtr[i] = value[i].longValue; + } + IC.isar_key_add_long_list_hash(keyPtr, longListPtr, value.length); + } + break; + case IsarType.stringList: + if (value == null) { + IC.isar_key_add_string_list_hash(keyPtr, nullptr, 0, false); + } else { + value as List; + final stringListPtr = malloc>(value.length); + for (var i = 0; i < value.length; i++) { + stringListPtr[i] = _strToNative(value[i]); + } + IC.isar_key_add_string_list_hash( + keyPtr, + stringListPtr, + value.length, + caseSensitive, + ); + for (var i = 0; i < value.length; i++) { + _freeStr(stringListPtr[i]); + } + } + break; + case IsarType.object: + case IsarType.floatList: + case IsarType.doubleList: + case IsarType.objectList: + throw IsarError('Unsupported property type.'); + } +} + +Pointer _strToNative(String? str) { + if (str == null) { + return Pointer.fromAddress(0); + } else { + return str.toCString(malloc); + } +} + +void _freeStr(Pointer strPtr) { + if (!strPtr.isNull) { + malloc.free(strPtr); + } +} + +double? adjustFloatBound({ + required double? 
value, + required bool lowerBound, + required bool include, + required double epsilon, +}) { + value ??= double.nan; + + if (lowerBound) { + if (include) { + if (value.isFinite) { + return value - epsilon; + } + } else { + if (value.isNaN) { + return double.negativeInfinity; + } else if (value == double.negativeInfinity) { + return -double.maxFinite; + } else if (value == double.maxFinite) { + return double.infinity; + } else if (value == double.infinity) { + return null; + } else { + return value + epsilon; + } + } + } else { + if (include) { + if (value.isFinite) { + return value + epsilon; + } + } else { + if (value.isNaN) { + return null; + } else if (value == double.negativeInfinity) { + return double.nan; + } else if (value == -double.maxFinite) { + return double.negativeInfinity; + } else if (value == double.infinity) { + return double.maxFinite; + } else { + return value - epsilon; + } + } + } + return value; +} diff --git a/lib/src/native/isar_collection_impl.dart b/lib/src/native/isar_collection_impl.dart new file mode 100644 index 0000000..0551ee3 --- /dev/null +++ b/lib/src/native/isar_collection_impl.dart @@ -0,0 +1,649 @@ +// ignore_for_file: public_member_api_docs, invalid_use_of_protected_member + +import 'dart:async'; +import 'dart:convert'; +import 'dart:ffi'; +import 'dart:isolate'; +import 'dart:typed_data'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/isar.dart'; +import 'package:isar/src/native/bindings.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/index_key.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/isar_impl.dart'; +import 'package:isar/src/native/isar_reader_impl.dart'; +import 'package:isar/src/native/isar_writer_impl.dart'; +import 'package:isar/src/native/query_build.dart'; +import 'package:isar/src/native/txn.dart'; + +class IsarCollectionImpl extends IsarCollection { + IsarCollectionImpl({ + required this.isar, + required this.ptr, 
+ required this.schema, + }); + + @override + final IsarImpl isar; + final Pointer ptr; + + @override + final CollectionSchema schema; + + late final _offsets = isar.offsets[OBJ]!; + late final _staticSize = _offsets.last; + + @pragma('vm:prefer-inline') + OBJ deserializeObject(CObject cObj) { + final buffer = cObj.buffer.asTypedList(cObj.buffer_length); + final reader = IsarReaderImpl(buffer); + final object = schema.deserialize( + cObj.id, + reader, + _offsets, + isar.offsets, + ); + schema.attach(this, cObj.id, object); + return object; + } + + @pragma('vm:prefer-inline') + OBJ? deserializeObjectOrNull(CObject cObj) { + if (!cObj.buffer.isNull) { + return deserializeObject(cObj); + } else { + return null; + } + } + + @pragma('vm:prefer-inline') + List deserializeObjects(CObjectSet objectSet) { + final objects = []; + for (var i = 0; i < objectSet.length; i++) { + final cObjPtr = objectSet.objects.elementAt(i); + final object = deserializeObject(cObjPtr.ref); + objects.add(object); + } + return objects; + } + + @pragma('vm:prefer-inline') + List deserializeObjectsOrNull(CObjectSet objectSet) { + final objects = List.filled(objectSet.length, null); + for (var i = 0; i < objectSet.length; i++) { + final cObj = objectSet.objects.elementAt(i).ref; + if (!cObj.buffer.isNull) { + objects[i] = deserializeObject(cObj); + } + } + return objects; + } + + @pragma('vm:prefer-inline') + Pointer> _getKeysPtr( + String indexName, + List keys, + Allocator alloc, + ) { + final keysPtrPtr = alloc>(keys.length); + for (var i = 0; i < keys.length; i++) { + keysPtrPtr[i] = buildIndexKey(schema, schema.index(indexName), keys[i]); + } + return keysPtrPtr; + } + + List deserializeProperty(CObjectSet objectSet, int? 
propertyId) { + final values = []; + if (propertyId != null) { + final propertyOffset = _offsets[propertyId]; + for (var i = 0; i < objectSet.length; i++) { + final cObj = objectSet.objects.elementAt(i).ref; + final buffer = cObj.buffer.asTypedList(cObj.buffer_length); + values.add( + schema.deserializeProp( + IsarReaderImpl(buffer), + propertyId, + propertyOffset, + isar.offsets, + ) as T, + ); + } + } else { + for (var i = 0; i < objectSet.length; i++) { + final cObj = objectSet.objects.elementAt(i).ref; + values.add(cObj.id as T); + } + } + return values; + } + + void serializeObjects( + Txn txn, + Pointer objectsPtr, + List objects, + ) { + var maxBufferSize = 0; + for (var i = 0; i < objects.length; i++) { + final object = objects[i]; + maxBufferSize += schema.estimateSize(object, _offsets, isar.offsets); + } + final bufferPtr = txn.alloc(maxBufferSize); + final buffer = bufferPtr.asTypedList(maxBufferSize).buffer; + + var writtenBytes = 0; + for (var i = 0; i < objects.length; i++) { + final objBuffer = buffer.asUint8List(writtenBytes); + final binaryWriter = IsarWriterImpl(objBuffer, _staticSize); + + final object = objects[i]; + schema.serialize( + object, + binaryWriter, + _offsets, + isar.offsets, + ); + final size = binaryWriter.usedBytes; + + final cObj = objectsPtr.elementAt(i).ref; + cObj.id = schema.getId(object); + cObj.buffer = bufferPtr.elementAt(writtenBytes); + cObj.buffer_length = size; + + writtenBytes += size; + } + } + + @override + Future> getAll(List ids) { + return isar.getTxn(false, (Txn txn) async { + final cObjSetPtr = txn.newCObjectSet(ids.length); + final objectsPtr = cObjSetPtr.ref.objects; + for (var i = 0; i < ids.length; i++) { + objectsPtr.elementAt(i).ref.id = ids[i]; + } + IC.isar_get_all(ptr, txn.ptr, cObjSetPtr); + await txn.wait(); + return deserializeObjectsOrNull(cObjSetPtr.ref); + }); + } + + @override + List getAllSync(List ids) { + return isar.getTxnSync(false, (Txn txn) { + final cObjPtr = txn.getCObject(); + final 
cObj = cObjPtr.ref; + + final objects = List.filled(ids.length, null); + for (var i = 0; i < ids.length; i++) { + cObj.id = ids[i]; + nCall(IC.isar_get(ptr, txn.ptr, cObjPtr)); + objects[i] = deserializeObjectOrNull(cObj); + } + + return objects; + }); + } + + @override + Future> getAllByIndex(String indexName, List keys) { + return isar.getTxn(false, (Txn txn) async { + final cObjSetPtr = txn.newCObjectSet(keys.length); + final keysPtrPtr = _getKeysPtr(indexName, keys, txn.alloc); + IC.isar_get_all_by_index( + ptr, + txn.ptr, + schema.index(indexName).id, + keysPtrPtr, + cObjSetPtr, + ); + await txn.wait(); + return deserializeObjectsOrNull(cObjSetPtr.ref); + }); + } + + @override + List getAllByIndexSync(String indexName, List keys) { + final index = schema.index(indexName); + + return isar.getTxnSync(false, (Txn txn) { + final cObjPtr = txn.getCObject(); + final cObj = cObjPtr.ref; + + final objects = List.filled(keys.length, null); + for (var i = 0; i < keys.length; i++) { + final keyPtr = buildIndexKey(schema, index, keys[i]); + nCall(IC.isar_get_by_index(ptr, txn.ptr, index.id, keyPtr, cObjPtr)); + objects[i] = deserializeObjectOrNull(cObj); + } + + return objects; + }); + } + + @override + int putSync(OBJ object, {bool saveLinks = true}) { + return isar.getTxnSync(true, (Txn txn) { + return putByIndexSyncInternal( + txn: txn, + object: object, + saveLinks: saveLinks, + ); + }); + } + + @override + int putByIndexSync(String indexName, OBJ object, {bool saveLinks = true}) { + return isar.getTxnSync(true, (Txn txn) { + return putByIndexSyncInternal( + txn: txn, + object: object, + indexId: schema.index(indexName).id, + saveLinks: saveLinks, + ); + }); + } + + int putByIndexSyncInternal({ + required Txn txn, + int? 
indexId, + required OBJ object, + bool saveLinks = true, + }) { + final cObjPtr = txn.getCObject(); + final cObj = cObjPtr.ref; + + final estimatedSize = schema.estimateSize(object, _offsets, isar.offsets); + cObj.buffer = txn.getBuffer(estimatedSize); + final buffer = cObj.buffer.asTypedList(estimatedSize); + + final writer = IsarWriterImpl(buffer, _staticSize); + schema.serialize( + object, + writer, + _offsets, + isar.offsets, + ); + cObj.buffer_length = writer.usedBytes; + + cObj.id = schema.getId(object); + + if (indexId != null) { + nCall(IC.isar_put_by_index(ptr, txn.ptr, indexId, cObjPtr)); + } else { + nCall(IC.isar_put(ptr, txn.ptr, cObjPtr)); + } + + final id = cObj.id; + schema.attach(this, id, object); + + if (saveLinks) { + for (final link in schema.getLinks(object)) { + link.saveSync(); + } + } + + return id; + } + + @override + Future> putAll(List objects) { + return putAllByIndex(null, objects); + } + + @override + List putAllSync(List objects, {bool saveLinks = true}) { + return putAllByIndexSync(null, objects, saveLinks: saveLinks); + } + + @override + Future> putAllByIndex(String? indexName, List objects) { + final indexId = indexName != null ? schema.index(indexName).id : null; + + return isar.getTxn(true, (Txn txn) async { + final cObjSetPtr = txn.newCObjectSet(objects.length); + serializeObjects(txn, cObjSetPtr.ref.objects, objects); + + if (indexId != null) { + IC.isar_put_all_by_index(ptr, txn.ptr, indexId, cObjSetPtr); + } else { + IC.isar_put_all(ptr, txn.ptr, cObjSetPtr); + } + + await txn.wait(); + final cObjectSet = cObjSetPtr.ref; + final ids = List.filled(objects.length, 0); + for (var i = 0; i < objects.length; i++) { + final cObjPtr = cObjectSet.objects.elementAt(i); + final id = cObjPtr.ref.id; + ids[i] = id; + + final object = objects[i]; + schema.attach(this, id, object); + } + return ids; + }); + } + + @override + List putAllByIndexSync( + String? 
indexName, + List objects, { + bool saveLinks = true, + }) { + final indexId = indexName != null ? schema.index(indexName).id : null; + final ids = List.filled(objects.length, 0); + isar.getTxnSync(true, (Txn txn) { + for (var i = 0; i < objects.length; i++) { + ids[i] = putByIndexSyncInternal( + txn: txn, + object: objects[i], + indexId: indexId, + saveLinks: saveLinks, + ); + } + }); + return ids; + } + + @override + Future deleteAll(List ids) { + return isar.getTxn(true, (Txn txn) async { + final countPtr = txn.alloc(); + final idsPtr = txn.alloc(ids.length); + idsPtr.asTypedList(ids.length).setAll(0, ids); + + IC.isar_delete_all(ptr, txn.ptr, idsPtr, ids.length, countPtr); + await txn.wait(); + + return countPtr.value; + }); + } + + @override + int deleteAllSync(List ids) { + return isar.getTxnSync(true, (Txn txn) { + final deletedPtr = txn.alloc(); + + var counter = 0; + for (var i = 0; i < ids.length; i++) { + nCall(IC.isar_delete(ptr, txn.ptr, ids[i], deletedPtr)); + if (deletedPtr.value) { + counter++; + } + } + return counter; + }); + } + + @override + Future deleteAllByIndex(String indexName, List keys) { + return isar.getTxn(true, (Txn txn) async { + final countPtr = txn.alloc(); + final keysPtrPtr = _getKeysPtr(indexName, keys, txn.alloc); + + IC.isar_delete_all_by_index( + ptr, + txn.ptr, + schema.index(indexName).id, + keysPtrPtr, + keys.length, + countPtr, + ); + await txn.wait(); + + return countPtr.value; + }); + } + + @override + int deleteAllByIndexSync(String indexName, List keys) { + return isar.getTxnSync(true, (Txn txn) { + final countPtr = txn.alloc(); + final keysPtrPtr = _getKeysPtr(indexName, keys, txn.alloc); + + nCall( + IC.isar_delete_all_by_index( + ptr, + txn.ptr, + schema.index(indexName).id, + keysPtrPtr, + keys.length, + countPtr, + ), + ); + return countPtr.value; + }); + } + + @override + Future clear() { + return isar.getTxn(true, (Txn txn) async { + IC.isar_clear(ptr, txn.ptr); + await txn.wait(); + }); + } + + @override + 
void clearSync() { + isar.getTxnSync(true, (Txn txn) { + nCall(IC.isar_clear(ptr, txn.ptr)); + }); + } + + @override + Future importJson(List> json) { + final bytes = const Utf8Encoder().convert(jsonEncode(json)); + return importJsonRaw(bytes); + } + + @override + Future importJsonRaw(Uint8List jsonBytes) { + return isar.getTxn(true, (Txn txn) async { + final bytesPtr = txn.alloc(jsonBytes.length); + bytesPtr.asTypedList(jsonBytes.length).setAll(0, jsonBytes); + final idNamePtr = schema.idName.toCString(txn.alloc); + + IC.isar_json_import( + ptr, + txn.ptr, + idNamePtr, + bytesPtr, + jsonBytes.length, + ); + await txn.wait(); + }); + } + + @override + void importJsonSync(List> json) { + final bytes = const Utf8Encoder().convert(jsonEncode(json)); + importJsonRawSync(bytes); + } + + @override + void importJsonRawSync(Uint8List jsonBytes) { + return isar.getTxnSync(true, (Txn txn) async { + final bytesPtr = txn.getBuffer(jsonBytes.length); + bytesPtr.asTypedList(jsonBytes.length).setAll(0, jsonBytes); + final idNamePtr = schema.idName.toCString(txn.alloc); + + nCall( + IC.isar_json_import( + ptr, + txn.ptr, + idNamePtr, + bytesPtr, + jsonBytes.length, + ), + ); + }); + } + + @override + Future count() { + return isar.getTxn(false, (Txn txn) async { + final countPtr = txn.alloc(); + IC.isar_count(ptr, txn.ptr, countPtr); + await txn.wait(); + return countPtr.value; + }); + } + + @override + int countSync() { + return isar.getTxnSync(false, (Txn txn) { + final countPtr = txn.alloc(); + nCall(IC.isar_count(ptr, txn.ptr, countPtr)); + return countPtr.value; + }); + } + + @override + Future getSize({ + bool includeIndexes = false, + bool includeLinks = false, + }) { + return isar.getTxn(false, (Txn txn) async { + final sizePtr = txn.alloc(); + IC.isar_get_size(ptr, txn.ptr, includeIndexes, includeLinks, sizePtr); + await txn.wait(); + return sizePtr.value; + }); + } + + @override + int getSizeSync({bool includeIndexes = false, bool includeLinks = false}) { + return 
isar.getTxnSync(false, (Txn txn) { + final sizePtr = txn.alloc(); + nCall( + IC.isar_get_size( + ptr, + txn.ptr, + includeIndexes, + includeLinks, + sizePtr, + ), + ); + return sizePtr.value; + }); + } + + @override + Stream watchLazy({bool fireImmediately = false}) { + isar.requireOpen(); + final port = ReceivePort(); + final handle = + IC.isar_watch_collection(isar.ptr, ptr, port.sendPort.nativePort); + final controller = StreamController( + onCancel: () { + IC.isar_stop_watching(handle); + port.close(); + }, + ); + + if (fireImmediately) { + controller.add(null); + } + + controller.addStream(port); + return controller.stream; + } + + @override + Stream watchObject(Id id, {bool fireImmediately = false}) { + return watchObjectLazy(id, fireImmediately: fireImmediately) + .asyncMap((event) => get(id)); + } + + @override + Stream watchObjectLazy(Id id, {bool fireImmediately = false}) { + isar.requireOpen(); + final cObjPtr = malloc(); + + final port = ReceivePort(); + final handle = + IC.isar_watch_object(isar.ptr, ptr, id, port.sendPort.nativePort); + malloc.free(cObjPtr); + + final controller = StreamController( + onCancel: () { + IC.isar_stop_watching(handle); + port.close(); + }, + ); + + if (fireImmediately) { + controller.add(null); + } + + controller.addStream(port); + return controller.stream; + } + + @override + Query buildQuery({ + List whereClauses = const [], + bool whereDistinct = false, + Sort whereSort = Sort.asc, + FilterOperation? filter, + List sortBy = const [], + List distinctBy = const [], + int? offset, + int? limit, + String? 
property, + }) { + isar.requireOpen(); + return buildNativeQuery( + this, + whereClauses, + whereDistinct, + whereSort, + filter, + sortBy, + distinctBy, + offset, + limit, + property, + ); + } + + @override + Future verify(List objects) async { + await isar.verify(); + return isar.getTxn(false, (Txn txn) async { + final cObjSetPtr = txn.newCObjectSet(objects.length); + serializeObjects(txn, cObjSetPtr.ref.objects, objects); + + IC.isar_verify(ptr, txn.ptr, cObjSetPtr); + await txn.wait(); + }); + } + + @override + Future verifyLink( + String linkName, + List sourceIds, + List targetIds, + ) async { + final link = schema.link(linkName); + + return isar.getTxn(false, (Txn txn) async { + final idsPtr = txn.alloc(sourceIds.length + targetIds.length); + for (var i = 0; i < sourceIds.length; i++) { + idsPtr[i * 2] = sourceIds[i]; + idsPtr[i * 2 + 1] = targetIds[i]; + } + + IC.isar_link_verify( + ptr, + txn.ptr, + link.id, + idsPtr, + sourceIds.length + targetIds.length, + ); + await txn.wait(); + }); + } +} diff --git a/lib/src/native/isar_core.dart b/lib/src/native/isar_core.dart new file mode 100644 index 0000000..6ca7389 --- /dev/null +++ b/lib/src/native/isar_core.dart @@ -0,0 +1,234 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:async'; +import 'dart:ffi'; +import 'dart:io'; +import 'dart:isolate'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/isar.dart'; +import 'package:isar/src/native/bindings.dart'; + +const Id isarMinId = -9223372036854775807; + +const Id isarMaxId = 9223372036854775807; + +const Id isarAutoIncrementId = -9223372036854775808; + +typedef IsarAbi = Abi; + +const int minByte = 0; +const int maxByte = 255; +const int minInt = -2147483648; +const int maxInt = 2147483647; +const int minLong = -9223372036854775808; +const int maxLong = 9223372036854775807; +const double minDouble = double.nan; +const double maxDouble = double.infinity; + +const nullByte = IsarObject_NULL_BYTE; +const nullInt = IsarObject_NULL_INT; +const 
nullLong = IsarObject_NULL_LONG; +const nullFloat = double.nan; +const nullDouble = double.nan; +final nullDate = DateTime.fromMillisecondsSinceEpoch(0); + +const nullBool = IsarObject_NULL_BOOL; +const falseBool = IsarObject_FALSE_BOOL; +const trueBool = IsarObject_TRUE_BOOL; + +const String _githubUrl = 'https://github.com/isar/isar/releases/download'; + +bool _isarInitialized = false; + +// ignore: non_constant_identifier_names +late final IsarCoreBindings IC; + +typedef FinalizerFunction = void Function(Pointer token); +late final Pointer isarClose; +late final Pointer isarQueryFree; + +FutureOr initializeCoreBinary({ + Map libraries = const {}, + bool download = false, +}) { + if (_isarInitialized) { + return null; + } + + String? libraryPath; + if (!Platform.isIOS) { + libraryPath = libraries[Abi.current()] ?? Abi.current().localName; + } + + try { + _initializePath(libraryPath); + } catch (e) { + if (!Platform.isAndroid && !Platform.isIOS) { + final downloadPath = _getLibraryDownloadPath(libraries); + if (download) { + return _downloadIsarCore(downloadPath).then((value) { + _initializePath(downloadPath); + }); + } else { + // try to use the binary at the download path anyway + _initializePath(downloadPath); + } + } else { + throw IsarError( + 'Could not initialize IsarCore library for processor architecture ' + '"${Abi.current()}". If you create a Flutter app, make sure to add ' + 'isar_flutter_libs to your dependencies.\n$e', + ); + } + } +} + +void _initializePath(String? libraryPath) { + late DynamicLibrary dylib; + if (Platform.isIOS) { + dylib = DynamicLibrary.process(); + } else { + dylib = DynamicLibrary.open(libraryPath!); + } + + final bindings = IsarCoreBindings(dylib); + + final coreVersion = bindings.isar_version().cast().toDartString(); + if (coreVersion != Isar.version && coreVersion != 'debug') { + throw IsarError( + 'Incorrect Isar Core version: Required ${Isar.version} found ' + '$coreVersion. Make sure to use the latest isar_flutter_libs. 
If you ' + 'have a Dart only project, make sure that old Isar Core binaries are ' + 'deleted.', + ); + } + + IC = bindings; + isarClose = dylib.lookup('isar_instance_close'); + isarQueryFree = dylib.lookup('isar_q_free'); + _isarInitialized = true; +} + +String _getLibraryDownloadPath(Map libraries) { + final providedPath = libraries[Abi.current()]; + if (providedPath != null) { + return providedPath; + } else { + final name = Abi.current().localName; + if (Platform.script.path.isEmpty) { + return name; + } + var dir = Platform.script.pathSegments + .sublist(0, Platform.script.pathSegments.length - 1) + .join(Platform.pathSeparator); + if (!Platform.isWindows) { + // Not on windows, add leading platform path separator + dir = '${Platform.pathSeparator}$dir'; + } + return '$dir${Platform.pathSeparator}$name'; + } +} + +Future _downloadIsarCore(String libraryPath) async { + final libraryFile = File(libraryPath); + // ignore: avoid_slow_async_io + if (await libraryFile.exists()) { + return; + } + final remoteName = Abi.current().remoteName; + final uri = Uri.parse('$_githubUrl/${Isar.version}/$remoteName'); + final request = await HttpClient().getUrl(uri); + final response = await request.close(); + if (response.statusCode != 200) { + throw IsarError( + 'Could not download IsarCore library: ${response.reasonPhrase}', + ); + } + await response.pipe(libraryFile.openWrite()); +} + +IsarError? 
isarErrorFromResult(int result) { + if (result != 0) { + final error = IC.isar_get_error(result); + if (error.address == 0) { + throw IsarError( + 'There was an error but it could not be loaded from IsarCore.', + ); + } + try { + final message = error.cast().toDartString(); + return IsarError(message); + } finally { + IC.isar_free_string(error); + } + } else { + return null; + } +} + +@pragma('vm:prefer-inline') +void nCall(int result) { + final error = isarErrorFromResult(result); + if (error != null) { + throw error; + } +} + +Stream wrapIsarPort(ReceivePort port) { + final portStreamController = StreamController(onCancel: port.close); + port.listen((event) { + if (event == 0) { + portStreamController.add(null); + } else { + final error = isarErrorFromResult(event as int); + portStreamController.addError(error!); + } + }); + return portStreamController.stream; +} + +extension PointerX on Pointer { + @pragma('vm:prefer-inline') + bool get isNull => address == 0; +} + +extension on Abi { + String get localName { + switch (Abi.current()) { + case Abi.androidArm: + case Abi.androidArm64: + case Abi.androidIA32: + case Abi.androidX64: + return 'libisar.so'; + case Abi.macosArm64: + case Abi.macosX64: + return 'libisar.dylib'; + case Abi.linuxX64: + return 'libisar.so'; + case Abi.windowsArm64: + case Abi.windowsX64: + return 'isar.dll'; + default: + throw IsarError( + 'Unsupported processor architecture "${Abi.current()}". 
' + 'Please open an issue on GitHub to request it.', + ); + } + } + + String get remoteName { + switch (Abi.current()) { + case Abi.macosArm64: + case Abi.macosX64: + return 'libisar_macos.dylib'; + case Abi.linuxX64: + return 'libisar_linux_x64.so'; + case Abi.windowsArm64: + return 'isar_windows_arm64.dll'; + case Abi.windowsX64: + return 'isar_windows_x64.dll'; + } + throw UnimplementedError(); + } +} diff --git a/lib/src/native/isar_impl.dart b/lib/src/native/isar_impl.dart new file mode 100644 index 0000000..c2d47a2 --- /dev/null +++ b/lib/src/native/isar_impl.dart @@ -0,0 +1,139 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:async'; +import 'dart:ffi'; +import 'dart:isolate'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/src/common/isar_common.dart'; +import 'package:isar/src/native/bindings.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/txn.dart'; + +class IsarImpl extends IsarCommon implements Finalizable { + IsarImpl(super.name, this.ptr) { + _finalizer = NativeFinalizer(isarClose); + _finalizer.attach(this, ptr.cast(), detach: this); + } + + final Pointer ptr; + late final NativeFinalizer _finalizer; + + final offsets = >{}; + + final Pointer> _syncTxnPtrPtr = malloc>(); + + String? 
_directory; + + @override + String get directory { + requireOpen(); + + if (_directory == null) { + final dirPtr = IC.isar_instance_get_path(ptr); + try { + _directory = dirPtr.cast().toDartString(); + } finally { + IC.isar_free_string(dirPtr); + } + } + + return _directory!; + } + + @override + Future beginTxn(bool write, bool silent) async { + final port = ReceivePort(); + final portStream = wrapIsarPort(port); + + final txnPtrPtr = malloc>(); + IC.isar_txn_begin( + ptr, + txnPtrPtr, + false, + write, + silent, + port.sendPort.nativePort, + ); + + final txn = Txn.async(this, txnPtrPtr.value, write, portStream); + await txn.wait(); + return txn; + } + + @override + Transaction beginTxnSync(bool write, bool silent) { + nCall(IC.isar_txn_begin(ptr, _syncTxnPtrPtr, true, write, silent, 0)); + return Txn.sync(this, _syncTxnPtrPtr.value, write); + } + + @override + bool performClose(bool deleteFromDisk) { + _finalizer.detach(this); + if (deleteFromDisk) { + return IC.isar_instance_close_and_delete(ptr); + } else { + return IC.isar_instance_close(ptr); + } + } + + @override + Future getSize({ + bool includeIndexes = false, + bool includeLinks = false, + }) { + return getTxn(false, (Txn txn) async { + final sizePtr = txn.alloc(); + IC.isar_instance_get_size( + ptr, + txn.ptr, + includeIndexes, + includeLinks, + sizePtr, + ); + await txn.wait(); + return sizePtr.value; + }); + } + + @override + int getSizeSync({bool includeIndexes = false, bool includeLinks = false}) { + return getTxnSync(false, (Txn txn) { + final sizePtr = txn.alloc(); + nCall( + IC.isar_instance_get_size( + ptr, + txn.ptr, + includeIndexes, + includeLinks, + sizePtr, + ), + ); + return sizePtr.value; + }); + } + + @override + Future copyToFile(String targetPath) async { + final pathPtr = targetPath.toCString(malloc); + final receivePort = ReceivePort(); + final nativePort = receivePort.sendPort.nativePort; + + try { + final stream = wrapIsarPort(receivePort); + IC.isar_instance_copy_to_file(ptr, 
pathPtr, nativePort); + await stream.first; + } finally { + malloc.free(pathPtr); + } + } + + @override + Future verify() async { + return getTxn(false, (Txn txn) async { + IC.isar_instance_verify(ptr, txn.ptr); + await txn.wait(); + }); + } +} diff --git a/lib/src/native/isar_link_impl.dart b/lib/src/native/isar_link_impl.dart new file mode 100644 index 0000000..3c954c7 --- /dev/null +++ b/lib/src/native/isar_link_impl.dart @@ -0,0 +1,121 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:ffi'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/common/isar_link_base_impl.dart'; +import 'package:isar/src/common/isar_link_common.dart'; +import 'package:isar/src/common/isar_links_common.dart'; +import 'package:isar/src/native/isar_collection_impl.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/txn.dart'; + +mixin IsarLinkBaseMixin on IsarLinkBaseImpl { + @override + IsarCollectionImpl get sourceCollection => + super.sourceCollection as IsarCollectionImpl; + + @override + IsarCollectionImpl get targetCollection => + super.targetCollection as IsarCollectionImpl; + + late final int linkId = sourceCollection.schema.link(linkName).id; + + @override + late final getId = targetCollection.schema.getId; + + @override + Future update({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }) { + final linkList = link.toList(); + final unlinkList = unlink.toList(); + + final containingId = requireAttached(); + return targetCollection.isar.getTxn(true, (Txn txn) { + final count = linkList.length + unlinkList.length; + final idsPtr = txn.alloc(count); + final ids = idsPtr.asTypedList(count); + + for (var i = 0; i < linkList.length; i++) { + ids[i] = requireGetId(linkList[i]); + } + for (var i = 0; i < unlinkList.length; i++) { + ids[linkList.length + i] = requireGetId(unlinkList[i]); + } + + IC.isar_link_update_all( + sourceCollection.ptr, + txn.ptr, + linkId, + containingId, + idsPtr, + 
linkList.length, + unlinkList.length, + reset, + ); + return txn.wait(); + }); + } + + @override + void updateSync({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }) { + final containingId = requireAttached(); + targetCollection.isar.getTxnSync(true, (Txn txn) { + if (reset) { + nCall( + IC.isar_link_unlink_all( + sourceCollection.ptr, + txn.ptr, + linkId, + containingId, + ), + ); + } + + for (final object in link) { + var id = getId(object); + if (id == Isar.autoIncrement) { + id = targetCollection.putByIndexSyncInternal( + txn: txn, + object: object, + ); + } + + nCall( + IC.isar_link( + sourceCollection.ptr, + txn.ptr, + linkId, + containingId, + id, + ), + ); + } + for (final object in unlink) { + final unlinkId = requireGetId(object); + nCall( + IC.isar_link_unlink( + sourceCollection.ptr, + txn.ptr, + linkId, + containingId, + unlinkId, + ), + ); + } + }); + } +} + +class IsarLinkImpl extends IsarLinkCommon + with IsarLinkBaseMixin {} + +class IsarLinksImpl extends IsarLinksCommon + with IsarLinkBaseMixin {} diff --git a/lib/src/native/isar_reader_impl.dart b/lib/src/native/isar_reader_impl.dart new file mode 100644 index 0000000..96969cb --- /dev/null +++ b/lib/src/native/isar_reader_impl.dart @@ -0,0 +1,591 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:convert'; +import 'dart:typed_data'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:meta/meta.dart'; + +/// @nodoc +@protected +class IsarReaderImpl implements IsarReader { + IsarReaderImpl(this._buffer) + : _byteData = ByteData.view(_buffer.buffer, _buffer.offsetInBytes) { + _staticSize = _byteData.getUint16(0, Endian.little); + } + + static const Utf8Decoder utf8Decoder = Utf8Decoder(); + + final Uint8List _buffer; + final ByteData _byteData; + late int _staticSize; + + @pragma('vm:prefer-inline') + bool _readBool(int offset) { + final value = _buffer[offset]; + if (value == trueBool) { + return 
true; + } else { + return false; + } + } + + @pragma('vm:prefer-inline') + @override + bool readBool(int offset) { + if (offset >= _staticSize) { + return false; + } + return _readBool(offset); + } + + @pragma('vm:prefer-inline') + bool? _readBoolOrNull(int offset) { + final value = _buffer[offset]; + if (value == trueBool) { + return true; + } else if (value == falseBool) { + return false; + } else { + return null; + } + } + + @pragma('vm:prefer-inline') + @override + bool? readBoolOrNull(int offset) { + if (offset >= _staticSize) { + return null; + } + return _readBoolOrNull(offset); + } + + @pragma('vm:prefer-inline') + @override + int readByte(int offset) { + if (offset >= _staticSize) { + return 0; + } + return _buffer[offset]; + } + + @pragma('vm:prefer-inline') + @override + int? readByteOrNull(int offset) { + if (offset >= _staticSize) { + return null; + } + return _buffer[offset]; + } + + @pragma('vm:prefer-inline') + @override + int readInt(int offset) { + if (offset >= _staticSize) { + return nullInt; + } + return _byteData.getInt32(offset, Endian.little); + } + + @pragma('vm:prefer-inline') + int? _readIntOrNull(int offset) { + final value = _byteData.getInt32(offset, Endian.little); + if (value != nullInt) { + return value; + } else { + return null; + } + } + + @pragma('vm:prefer-inline') + @override + int? readIntOrNull(int offset) { + if (offset >= _staticSize) { + return null; + } + return _readIntOrNull(offset); + } + + @pragma('vm:prefer-inline') + @override + double readFloat(int offset) { + if (offset >= _staticSize) { + return nullDouble; + } + return _byteData.getFloat32(offset, Endian.little); + } + + @pragma('vm:prefer-inline') + double? _readFloatOrNull(int offset) { + final value = _byteData.getFloat32(offset, Endian.little); + if (!value.isNaN) { + return value; + } else { + return null; + } + } + + @pragma('vm:prefer-inline') + @override + double? 
readFloatOrNull(int offset) { + if (offset >= _staticSize) { + return null; + } + return _readFloatOrNull(offset); + } + + @pragma('vm:prefer-inline') + @override + int readLong(int offset) { + if (offset >= _staticSize) { + return nullLong; + } + return _byteData.getInt64(offset, Endian.little); + } + + @pragma('vm:prefer-inline') + int? _readLongOrNull(int offset) { + final value = _byteData.getInt64(offset, Endian.little); + if (value != nullLong) { + return value; + } else { + return null; + } + } + + @pragma('vm:prefer-inline') + @override + int? readLongOrNull(int offset) { + if (offset >= _staticSize) { + return null; + } + return _readLongOrNull(offset); + } + + @pragma('vm:prefer-inline') + @override + double readDouble(int offset) { + if (offset >= _staticSize) { + return nullDouble; + } + return _byteData.getFloat64(offset, Endian.little); + } + + @pragma('vm:prefer-inline') + double? _readDoubleOrNull(int offset) { + final value = _byteData.getFloat64(offset, Endian.little); + if (!value.isNaN) { + return value; + } else { + return null; + } + } + + @pragma('vm:prefer-inline') + @override + double? readDoubleOrNull(int offset) { + if (offset >= _staticSize) { + return null; + } + return _readDoubleOrNull(offset); + } + + @pragma('vm:prefer-inline') + @override + DateTime readDateTime(int offset) { + final time = readLongOrNull(offset); + return time != null + ? DateTime.fromMicrosecondsSinceEpoch(time, isUtc: true).toLocal() + : nullDate; + } + + @pragma('vm:prefer-inline') + @override + DateTime? 
readDateTimeOrNull(int offset) { + final time = readLongOrNull(offset); + if (time != null) { + return DateTime.fromMicrosecondsSinceEpoch(time, isUtc: true).toLocal(); + } else { + return null; + } + } + + @pragma('vm:prefer-inline') + int _readUint24(int offset) { + return _buffer[offset] | + _buffer[offset + 1] << 8 | + _buffer[offset + 2] << 16; + } + + @pragma('vm:prefer-inline') + @override + String readString(int offset) { + return readStringOrNull(offset) ?? ''; + } + + @pragma('vm:prefer-inline') + @override + String? readStringOrNull(int offset) { + if (offset >= _staticSize) { + return null; + } + + var bytesOffset = _readUint24(offset); + if (bytesOffset == 0) { + return null; + } + + final length = _readUint24(bytesOffset); + bytesOffset += 3; + + return utf8Decoder.convert(_buffer, bytesOffset, bytesOffset + length); + } + + @pragma('vm:prefer-inline') + @override + T? readObjectOrNull( + int offset, + Deserialize deserialize, + Map> allOffsets, + ) { + if (offset >= _staticSize) { + return null; + } + + var bytesOffset = _readUint24(offset); + if (bytesOffset == 0) { + return null; + } + + final length = _readUint24(bytesOffset); + bytesOffset += 3; + + final buffer = + Uint8List.sublistView(_buffer, bytesOffset, bytesOffset + length); + final reader = IsarReaderImpl(buffer); + final offsets = allOffsets[T]!; + return deserialize(0, reader, offsets, allOffsets); + } + + @override + List? readBoolList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = List.filled(length, false); + for (var i = 0; i < length; i++) { + list[i] = _readBool(listOffset + i); + } + return list; + } + + @override + List? 
readBoolOrNullList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = List.filled(length, null); + for (var i = 0; i < length; i++) { + list[i] = _readBoolOrNull(listOffset + i); + } + return list; + } + + @override + List? readByteList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + return _buffer.sublist(listOffset, listOffset + length); + } + + @override + List? readIntList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = Int32List(length); + for (var i = 0; i < length; i++) { + list[i] = _byteData.getInt32(listOffset + i * 4, Endian.little); + } + return list; + } + + @override + List? readIntOrNullList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = List.filled(length, null); + for (var i = 0; i < length; i++) { + list[i] = _readIntOrNull(listOffset + i * 4); + } + return list; + } + + @override + List? readFloatList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = Float32List(length); + for (var i = 0; i < length; i++) { + list[i] = _byteData.getFloat32(listOffset + i * 4, Endian.little); + } + return list; + } + + @override + List? 
readFloatOrNullList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = List.filled(length, null); + for (var i = 0; i < length; i++) { + list[i] = _readFloatOrNull(listOffset + i * 4); + } + return list; + } + + @override + List? readLongList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = Int64List(length); + for (var i = 0; i < length; i++) { + list[i] = _byteData.getInt64(listOffset + i * 8, Endian.little); + } + return list; + } + + @override + List? readLongOrNullList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = List.filled(length, null); + for (var i = 0; i < length; i++) { + list[i] = _readLongOrNull(listOffset + i * 8); + } + return list; + } + + @override + List? readDoubleList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = Float64List(length); + for (var i = 0; i < length; i++) { + list[i] = _byteData.getFloat64(listOffset + i * 8, Endian.little); + } + return list; + } + + @override + List? 
readDoubleOrNullList(int offset) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = List.filled(length, null); + for (var i = 0; i < length; i++) { + list[i] = _readDoubleOrNull(listOffset + i * 8); + } + return list; + } + + @override + List? readDateTimeList(int offset) { + return readLongOrNullList(offset)?.map((e) { + if (e != null) { + return DateTime.fromMicrosecondsSinceEpoch(e, isUtc: true).toLocal(); + } else { + return nullDate; + } + }).toList(); + } + + @override + List? readDateTimeOrNullList(int offset) { + return readLongOrNullList(offset)?.map((e) { + if (e != null) { + return DateTime.fromMicrosecondsSinceEpoch(e, isUtc: true).toLocal(); + } + }).toList(); + } + + List? readDynamicList( + int offset, + T nullValue, + T Function(int startOffset, int endOffset) transform, + ) { + if (offset >= _staticSize) { + return null; + } + + var listOffset = _readUint24(offset); + if (listOffset == 0) { + return null; + } + + final length = _readUint24(listOffset); + listOffset += 3; + + final list = List.filled(length, nullValue); + var contentOffset = listOffset + length * 3; + for (var i = 0; i < length; i++) { + final itemSize = _readUint24(listOffset + i * 3); + + if (itemSize != 0) { + list[i] = transform(contentOffset, contentOffset + itemSize - 1); + contentOffset += itemSize - 1; + } + } + + return list; + } + + @override + List? readStringList(int offset) { + return readDynamicList(offset, '', (startOffset, endOffset) { + return utf8Decoder.convert(_buffer, startOffset, endOffset); + }); + } + + @override + List? readStringOrNullList(int offset) { + return readDynamicList(offset, null, (startOffset, endOffset) { + return utf8Decoder.convert(_buffer, startOffset, endOffset); + }); + } + + @override + List? 
readObjectList( + int offset, + Deserialize deserialize, + Map> allOffsets, + T defaultValue, + ) { + final offsets = allOffsets[T]!; + return readDynamicList(offset, defaultValue, (startOffset, endOffset) { + final buffer = Uint8List.sublistView(_buffer, startOffset, endOffset); + final reader = IsarReaderImpl(buffer); + return deserialize(0, reader, offsets, allOffsets); + }); + } + + @override + List? readObjectOrNullList( + int offset, + Deserialize deserialize, + Map> allOffsets, + ) { + final offsets = allOffsets[T]!; + return readDynamicList(offset, null, (startOffset, endOffset) { + final buffer = Uint8List.sublistView(_buffer, startOffset, endOffset); + final reader = IsarReaderImpl(buffer); + return deserialize(0, reader, offsets, allOffsets); + }); + } +} diff --git a/lib/src/native/isar_writer_impl.dart b/lib/src/native/isar_writer_impl.dart new file mode 100644 index 0000000..7509e67 --- /dev/null +++ b/lib/src/native/isar_writer_impl.dart @@ -0,0 +1,284 @@ +// ignore_for_file: public_member_api_docs, prefer_asserts_with_message, +// avoid_positional_boolean_parameters + +import 'dart:typed_data'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:meta/meta.dart'; + +/// @nodoc +@protected +class IsarWriterImpl implements IsarWriter { + IsarWriterImpl(Uint8List buffer, int staticSize) + : _dynamicOffset = staticSize, + _buffer = buffer, + _byteData = ByteData.view(buffer.buffer, buffer.offsetInBytes) { + _byteData.setUint16(0, staticSize, Endian.little); + + // Required because we don't want to persist uninitialized memory. + for (var i = 2; i < staticSize; i++) { + _buffer[i] = 0; + } + } + + final Uint8List _buffer; + + final ByteData _byteData; + + int _dynamicOffset; + + int get usedBytes => _dynamicOffset; + + @override + @pragma('vm:prefer-inline') + void writeBool(int offset, bool? 
value) { + _buffer[offset] = value.byteValue; + } + + @override + @pragma('vm:prefer-inline') + void writeByte(int offset, int value) { + assert(value >= minByte && value <= maxByte); + _buffer[offset] = value; + } + + @override + @pragma('vm:prefer-inline') + void writeInt(int offset, int? value) { + value ??= nullInt; + assert(value >= minInt && value <= maxInt); + _byteData.setInt32(offset, value, Endian.little); + } + + @override + @pragma('vm:prefer-inline') + void writeFloat(int offset, double? value) { + _byteData.setFloat32(offset, value ?? double.nan, Endian.little); + } + + @override + @pragma('vm:prefer-inline') + void writeLong(int offset, int? value) { + _byteData.setInt64(offset, value ?? nullLong, Endian.little); + } + + @override + @pragma('vm:prefer-inline') + void writeDouble(int offset, double? value) { + _byteData.setFloat64(offset, value ?? double.nan, Endian.little); + } + + @override + @pragma('vm:prefer-inline') + void writeDateTime(int offset, DateTime? value) { + writeLong(offset, value?.toUtc().microsecondsSinceEpoch); + } + + @pragma('vm:prefer-inline') + void _writeUint24(int offset, int value) { + _buffer[offset] = value; + _buffer[offset + 1] = value >> 8; + _buffer[offset + 2] = value >> 16; + } + + @override + @pragma('vm:prefer-inline') + void writeString(int offset, String? value) { + if (value != null) { + final byteCount = encodeString(value, _buffer, _dynamicOffset + 3); + _writeUint24(offset, _dynamicOffset); + _writeUint24(_dynamicOffset, byteCount); + _dynamicOffset += byteCount + 3; + } else { + _writeUint24(offset, 0); + } + } + + @override + @pragma('vm:prefer-inline') + void writeObject( + int offset, + Map> allOffsets, + Serialize serialize, + T? 
value, + ) { + if (value != null) { + final buffer = Uint8List.sublistView(_buffer, _dynamicOffset + 3); + final offsets = allOffsets[T]!; + final binaryWriter = IsarWriterImpl(buffer, offsets.last); + serialize(value, binaryWriter, offsets, allOffsets); + final byteCount = binaryWriter.usedBytes; + _writeUint24(offset, _dynamicOffset); + _writeUint24(_dynamicOffset, byteCount); + _dynamicOffset += byteCount + 3; + } else { + _writeUint24(offset, 0); + } + } + + @pragma('vm:prefer-inline') + void _writeListOffset(int offset, int? length) { + if (length == null) { + _writeUint24(offset, 0); + } else { + _writeUint24(offset, _dynamicOffset); + _writeUint24(_dynamicOffset, length); + _dynamicOffset += 3; + } + } + + @override + @pragma('vm:prefer-inline') + void writeByteList(int offset, List? values) { + _writeListOffset(offset, values?.length); + + if (values != null) { + for (var i = 0; i < values.length; i++) { + _buffer[_dynamicOffset++] = values[i]; + } + } + } + + @override + void writeBoolList(int offset, List? values) { + _writeListOffset(offset, values?.length); + + if (values != null) { + for (var i = 0; i < values.length; i++) { + _buffer[_dynamicOffset++] = values[i].byteValue; + } + } + } + + @override + void writeIntList(int offset, List? values) { + _writeListOffset(offset, values?.length); + + if (values != null) { + for (var value in values) { + value ??= nullInt; + assert(value >= minInt && value <= maxInt); + _byteData.setUint32(_dynamicOffset, value, Endian.little); + _dynamicOffset += 4; + } + } + } + + @override + void writeFloatList(int offset, List? values) { + _writeListOffset(offset, values?.length); + + if (values != null) { + for (var i = 0; i < values.length; i++) { + _byteData.setFloat32( + _dynamicOffset, + values[i] ?? nullFloat, + Endian.little, + ); + _dynamicOffset += 4; + } + } + } + + @override + void writeLongList(int offset, List? 
values) { + _writeListOffset(offset, values?.length); + + if (values != null) { + for (var i = 0; i < values.length; i++) { + _byteData.setInt64( + _dynamicOffset, + values[i] ?? nullLong, + Endian.little, + ); + _dynamicOffset += 8; + } + } + } + + @override + void writeDoubleList(int offset, List? values) { + _writeListOffset(offset, values?.length); + + if (values != null) { + for (var i = 0; i < values.length; i++) { + _byteData.setFloat64( + _dynamicOffset, + values[i] ?? nullDouble, + Endian.little, + ); + _dynamicOffset += 8; + } + } + } + + @override + void writeDateTimeList(int offset, List? values) { + final longList = values?.map((e) => e.longValue).toList(); + writeLongList(offset, longList); + } + + @override + void writeStringList(int offset, List? values) { + _writeListOffset(offset, values?.length); + + if (values != null) { + final offsetListOffset = _dynamicOffset; + _dynamicOffset += values.length * 3; + for (var i = 0; i < values.length; i++) { + final value = values[i]; + if (value != null) { + final byteCount = encodeString(value, _buffer, _dynamicOffset); + _writeUint24(offsetListOffset + i * 3, byteCount + 1); + _dynamicOffset += byteCount; + } else { + _writeUint24(offsetListOffset + i * 3, 0); + } + } + } + } + + @override + void writeObjectList( + int offset, + Map> allOffsets, + Serialize serialize, + List? 
values, + ) { + _writeListOffset(offset, values?.length); + + if (values != null) { + final offsetListOffset = _dynamicOffset; + _dynamicOffset += values.length * 3; + + final offsets = allOffsets[T]!; + final staticSize = offsets.last; + for (var i = 0; i < values.length; i++) { + final value = values[i]; + if (value != null) { + final buffer = Uint8List.sublistView(_buffer, _dynamicOffset); + final binaryWriter = IsarWriterImpl(buffer, staticSize); + serialize(value, binaryWriter, offsets, allOffsets); + final byteCount = binaryWriter.usedBytes; + _writeUint24(offsetListOffset + i * 3, byteCount + 1); + _dynamicOffset += byteCount; + } else { + _writeUint24(offsetListOffset + i * 3, 0); + } + } + } + } +} + +extension IsarBoolValue on bool? { + @pragma('vm:prefer-inline') + int get byteValue => + this == null ? nullBool : (this == true ? trueBool : falseBool); +} + +extension IsarDateTimeValue on DateTime? { + @pragma('vm:prefer-inline') + int get longValue => this?.toUtc().microsecondsSinceEpoch ?? 
nullLong; +} diff --git a/lib/src/native/open.dart b/lib/src/native/open.dart new file mode 100644 index 0000000..1b169e0 --- /dev/null +++ b/lib/src/native/open.dart @@ -0,0 +1,159 @@ +// ignore_for_file: public_member_api_docs, invalid_use_of_protected_member + +import 'dart:convert'; +import 'dart:ffi'; +import 'dart:isolate'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/isar.dart'; +import 'package:isar/src/common/schemas.dart'; +import 'package:isar/src/native/bindings.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/isar_collection_impl.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/isar_impl.dart'; + +final Pointer> _isarPtrPtr = + malloc>(); + +List _getOffsets( + Pointer colPtr, + int propertiesCount, + int embeddedColId, +) { + final offsetsPtr = malloc(propertiesCount); + final staticSize = IC.isar_get_offsets(colPtr, embeddedColId, offsetsPtr); + final offsets = offsetsPtr.asTypedList(propertiesCount).toList(); + offsets.add(staticSize); + malloc.free(offsetsPtr); + return offsets; +} + +void _initializeInstance( + IsarImpl isar, + List> schemas, +) { + final colPtrPtr = malloc>(); + + final cols = >{}; + for (final schema in schemas) { + nCall(IC.isar_instance_get_collection(isar.ptr, colPtrPtr, schema.id)); + + final offsets = _getOffsets(colPtrPtr.value, schema.properties.length, 0); + + for (final embeddedSchema in schema.embeddedSchemas.values) { + final embeddedType = embeddedSchema.type; + if (!isar.offsets.containsKey(embeddedType)) { + final offsets = _getOffsets( + colPtrPtr.value, + embeddedSchema.properties.length, + embeddedSchema.id, + ); + isar.offsets[embeddedType] = offsets; + } + } + + schema.toCollection(() { + isar.offsets[OBJ] = offsets; + + schema as CollectionSchema; + cols[OBJ] = IsarCollectionImpl( + isar: isar, + ptr: colPtrPtr.value, + schema: schema, + ); + }); + } + + malloc.free(colPtrPtr); + + isar.attachCollections(cols); +} 
+ +Future openIsar({ + required List> schemas, + required String directory, + required String name, + required int maxSizeMiB, + required bool relaxedDurability, + CompactCondition? compactOnLaunch, +}) async { + initializeCoreBinary(); + IC.isar_connect_dart_api(NativeApi.postCObject.cast()); + + return using((Arena alloc) async { + final namePtr = name.toCString(alloc); + final dirPtr = directory.toCString(alloc); + + final schemasJson = getSchemas(schemas).map((e) => e.toJson()); + final schemaStrPtr = jsonEncode(schemasJson.toList()).toCString(alloc); + + final compactMinFileSize = compactOnLaunch?.minFileSize; + final compactMinBytes = compactOnLaunch?.minBytes; + final compactMinRatio = + compactOnLaunch == null ? double.nan : compactOnLaunch.minRatio; + + final receivePort = ReceivePort(); + final nativePort = receivePort.sendPort.nativePort; + final stream = wrapIsarPort(receivePort); + IC.isar_instance_create_async( + _isarPtrPtr, + namePtr, + dirPtr, + schemaStrPtr, + maxSizeMiB, + relaxedDurability, + compactMinFileSize ?? 0, + compactMinBytes ?? 0, + compactMinRatio ?? 0, + nativePort, + ); + await stream.first; + + final isar = IsarImpl(name, _isarPtrPtr.value); + _initializeInstance(isar, schemas); + return isar; + }); +} + +Isar openIsarSync({ + required List> schemas, + required String directory, + required String name, + required int maxSizeMiB, + required bool relaxedDurability, + CompactCondition? compactOnLaunch, +}) { + initializeCoreBinary(); + IC.isar_connect_dart_api(NativeApi.postCObject.cast()); + return using((Arena alloc) { + final namePtr = name.toCString(alloc); + final dirPtr = directory.toCString(alloc); + + final schemasJson = getSchemas(schemas).map((e) => e.toJson()); + final schemaStrPtr = jsonEncode(schemasJson.toList()).toCString(alloc); + + final compactMinFileSize = compactOnLaunch?.minFileSize; + final compactMinBytes = compactOnLaunch?.minBytes; + final compactMinRatio = + compactOnLaunch == null ? 
double.nan : compactOnLaunch.minRatio; + + nCall( + IC.isar_instance_create( + _isarPtrPtr, + namePtr, + dirPtr, + schemaStrPtr, + maxSizeMiB, + relaxedDurability, + compactMinFileSize ?? 0, + compactMinBytes ?? 0, + compactMinRatio ?? 0, + ), + ); + + final isar = IsarImpl(name, _isarPtrPtr.value); + _initializeInstance(isar, schemas); + return isar; + }); +} diff --git a/lib/src/native/query_build.dart b/lib/src/native/query_build.dart new file mode 100644 index 0000000..1e78717 --- /dev/null +++ b/lib/src/native/query_build.dart @@ -0,0 +1,1040 @@ +// ignore_for_file: invalid_use_of_protected_member, public_member_api_docs + +import 'dart:ffi'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/isar.dart'; +import 'package:isar/src/native/bindings.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/index_key.dart'; +import 'package:isar/src/native/isar_collection_impl.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/isar_writer_impl.dart'; +import 'package:isar/src/native/query_impl.dart'; + +final Pointer maxStr = '\u{FFFFF}'.toNativeUtf8().cast(); + +Query buildNativeQuery( + IsarCollectionImpl col, + List whereClauses, + bool whereDistinct, + Sort whereSort, + FilterOperation? filter, + List sortBy, + List distinctBy, + int? offset, + int? limit, + String? 
property, +) { + final qbPtr = IC.isar_qb_create(col.ptr); + + for (final wc in whereClauses) { + if (wc is IdWhereClause) { + _addIdWhereClause(qbPtr, wc, whereSort); + } else if (wc is IndexWhereClause) { + _addIndexWhereClause( + col.schema, + qbPtr, + wc, + whereDistinct, + whereSort, + ); + } else { + _addLinkWhereClause(col.isar, qbPtr, wc as LinkWhereClause); + } + } + + if (filter != null) { + final alloc = Arena(malloc); + try { + final filterPtr = _buildFilter(col, null, filter, alloc); + if (filterPtr != null) { + IC.isar_qb_set_filter(qbPtr, filterPtr); + } + } finally { + alloc.releaseAll(); + } + } + + for (final sortProperty in sortBy) { + final property = col.schema.property(sortProperty.property); + nCall( + IC.isar_qb_add_sort_by( + qbPtr, + property.id, + sortProperty.sort == Sort.asc, + ), + ); + } + + if (offset != null || limit != null) { + IC.isar_qb_set_offset_limit(qbPtr, offset ?? -1, limit ?? -1); + } + + for (final distinctByProperty in distinctBy) { + final property = col.schema.property(distinctByProperty.property); + nCall( + IC.isar_qb_add_distinct_by( + qbPtr, + property.id, + distinctByProperty.caseSensitive ?? true, + ), + ); + } + + QueryDeserialize deserialize; + int? propertyId; + if (property == null) { + deserialize = (col as IsarCollectionImpl).deserializeObjects; + } else { + propertyId = + property != col.schema.idName ? col.schema.property(property).id : null; + deserialize = + (CObjectSet cObjSet) => col.deserializeProperty(cObjSet, propertyId); + } + + final queryPtr = IC.isar_qb_build(qbPtr); + return QueryImpl(col, queryPtr, deserialize, propertyId); +} + +void _addIdWhereClause( + Pointer qbPtr, + IdWhereClause wc, + Sort sort, +) { + final lower = (wc.lower ?? minLong) + (wc.includeLower ? 0 : 1); + final upper = (wc.upper ?? maxLong) - (wc.includeUpper ? 0 : 1); + nCall( + IC.isar_qb_add_id_where_clause( + qbPtr, + sort == Sort.asc ? lower : upper, + sort == Sort.asc ? upper : lower, + ), + ); +} + +Pointer? 
_buildLowerIndexBound( + CollectionSchema schema, + IndexSchema index, + IndexWhereClause wc, +) { + if (wc.lower == null) { + return buildLowerUnboundedIndexKey(); + } + + final firstVal = wc.lower!.length == 1 ? wc.lower!.first : null; + if (firstVal is double) { + final adjusted = adjustFloatBound( + value: firstVal, + lowerBound: true, + include: wc.includeLower, + epsilon: wc.epsilon, + ); + if (adjusted == null) { + return null; + } + + return buildIndexKey(schema, index, [adjusted]); + } else { + final lowerPtr = buildIndexKey(schema, index, wc.lower!); + + if (!wc.includeLower) { + if (!IC.isar_key_increase(lowerPtr)) { + return null; + } + } + + return lowerPtr; + } +} + +Pointer? _buildUpperIndexBound( + CollectionSchema schema, + IndexSchema index, + IndexWhereClause wc, +) { + if (wc.upper == null) { + return buildUpperUnboundedIndexKey(); + } + + final firstVal = wc.upper!.length == 1 ? wc.upper!.first : null; + if (firstVal is double) { + final adjusted = adjustFloatBound( + value: firstVal, + lowerBound: false, + include: wc.includeUpper, + epsilon: wc.epsilon, + ); + if (adjusted == null) { + return null; + } else { + return buildIndexKey(schema, index, [adjusted]); + } + } else { + final upperPtr = buildIndexKey(schema, index, wc.upper!); + + if (!wc.includeUpper) { + if (!IC.isar_key_decrease(upperPtr)) { + return null; + } + } + + // Also include composite indexes for upper keys + if (index.properties.length > wc.upper!.length) { + IC.isar_key_add_long(upperPtr, maxLong); + } + + return upperPtr; + } +} + +void _addIndexWhereClause( + CollectionSchema schema, + Pointer qbPtr, + IndexWhereClause wc, + bool distinct, + Sort sort, +) { + final index = schema.index(wc.indexName); + final lowerPtr = _buildLowerIndexBound(schema, index, wc); + final upperPtr = _buildUpperIndexBound(schema, index, wc); + + if (lowerPtr != null && upperPtr != null) { + nCall( + IC.isar_qb_add_index_where_clause( + qbPtr, + schema.index(wc.indexName).id, + lowerPtr, + 
upperPtr, + sort == Sort.asc, + distinct, + ), + ); + } else { + // this where clause does not match any objects + nCall( + IC.isar_qb_add_id_where_clause( + qbPtr, + Isar.autoIncrement, + Isar.autoIncrement, + ), + ); + } +} + +void _addLinkWhereClause( + Isar isar, + Pointer qbPtr, + LinkWhereClause wc, +) { + final linkCol = isar.getCollectionByNameInternal(wc.linkCollection)!; + linkCol as IsarCollectionImpl; + + final linkId = linkCol.schema.link(wc.linkName).id; + nCall(IC.isar_qb_add_link_where_clause(qbPtr, linkCol.ptr, linkId, wc.id)); +} + +Pointer? _buildFilter( + IsarCollectionImpl col, + Schema? embeddedCol, + FilterOperation filter, + Allocator alloc, +) { + if (filter is FilterGroup) { + return _buildFilterGroup(col, embeddedCol, filter, alloc); + } else if (filter is LinkFilter) { + return _buildLink(col, filter, alloc); + } else if (filter is ObjectFilter) { + return _buildObject(col, embeddedCol, filter, alloc); + } else if (filter is FilterCondition) { + return _buildCondition(col, embeddedCol, filter, alloc); + } else { + return null; + } +} + +Pointer? _buildFilterGroup( + IsarCollectionImpl col, + Schema? embeddedCol, + FilterGroup group, + Allocator alloc, +) { + final builtConditions = group.filters + .map((FilterOperation op) => _buildFilter(col, embeddedCol, op, alloc)) + .where((Pointer? 
it) => it != null) + .toList(); + + if (builtConditions.isEmpty) { + return null; + } + + final filterPtrPtr = alloc>(); + if (group.type == FilterGroupType.not) { + IC.isar_filter_not( + filterPtrPtr, + builtConditions.first!, + ); + } else if (builtConditions.length == 1) { + return builtConditions[0]; + } else { + final conditionsPtrPtr = alloc>(builtConditions.length); + for (var i = 0; i < builtConditions.length; i++) { + conditionsPtrPtr[i] = builtConditions[i]!; + } + IC.isar_filter_and_or_xor( + filterPtrPtr, + group.type == FilterGroupType.and, + group.type == FilterGroupType.xor, + conditionsPtrPtr, + builtConditions.length, + ); + } + + return filterPtrPtr.value; +} + +Pointer? _buildLink( + IsarCollectionImpl col, + LinkFilter link, + Allocator alloc, +) { + final linkSchema = col.schema.link(link.linkName); + final linkTargetCol = + col.isar.getCollectionByNameInternal(linkSchema.target)!; + final linkId = col.schema.link(link.linkName).id; + + final filterPtrPtr = alloc>(); + + if (link.filter != null) { + final condition = _buildFilter( + linkTargetCol as IsarCollectionImpl, + null, + link.filter!, + alloc, + ); + if (condition == null) { + return null; + } + + nCall( + IC.isar_filter_link( + col.ptr, + filterPtrPtr, + condition, + linkId, + ), + ); + } else { + nCall( + IC.isar_filter_link_length( + col.ptr, + filterPtrPtr, + link.lower!, + link.upper!, + linkId, + ), + ); + } + + return filterPtrPtr.value; +} + +Pointer? _buildObject( + IsarCollectionImpl col, + Schema? embeddedCol, + ObjectFilter objectFilter, + Allocator alloc, +) { + final property = (embeddedCol ?? col.schema).property(objectFilter.property); + + final condition = _buildFilter( + col, + col.schema.embeddedSchemas[property.target], + objectFilter.filter, + alloc, + ); + if (condition == null) { + return null; + } + + final filterPtrPtr = alloc>(); + nCall( + IC.isar_filter_object( + col.ptr, + filterPtrPtr, + condition, + embeddedCol?.id ?? 
0, + property.id, + ), + ); + + return filterPtrPtr.value; +} + +Object _prepareValue( + Object? value, + Allocator alloc, + IsarType type, + Map? enumMap, +) { + if (value is bool) { + return value.byteValue; + } else if (value is DateTime) { + return value.longValue; + } else if (value is Enum) { + return _prepareValue(enumMap![value.name], alloc, type, null); + } else if (value is String) { + return value.toCString(alloc); + } else if (value == null) { + switch (type) { + case IsarType.bool: + case IsarType.byte: + case IsarType.boolList: + case IsarType.byteList: + return minByte; + case IsarType.int: + case IsarType.intList: + return minInt; + case IsarType.long: + case IsarType.longList: + case IsarType.dateTime: + case IsarType.dateTimeList: + return minLong; + case IsarType.float: + case IsarType.double: + case IsarType.floatList: + case IsarType.doubleList: + return minDouble; + case IsarType.string: + case IsarType.stringList: + case IsarType.object: + case IsarType.objectList: + return nullptr; + } + } else { + return value; + } +} + +Pointer _buildCondition( + IsarCollectionImpl col, + Schema? embeddedCol, + FilterCondition condition, + Allocator alloc, +) { + final property = condition.property != col.schema.idName + ? (embeddedCol ?? col.schema).property(condition.property) + : null; + + final value1 = _prepareValue( + condition.value1, + alloc, + property?.type ?? IsarType.long, + property?.enumMap, + ); + final value2 = _prepareValue( + condition.value2, + alloc, + property?.type ?? 
IsarType.long, + property?.enumMap, + ); + final filterPtr = alloc>(); + + switch (condition.type) { + case FilterConditionType.equalTo: + _buildConditionEqual( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + val: value1, + caseSensitive: condition.caseSensitive, + epsilon: condition.epsilon, + ); + break; + case FilterConditionType.between: + _buildConditionBetween( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + lower: value1, + includeLower: condition.include1, + upper: value2, + includeUpper: condition.include2, + caseSensitive: condition.caseSensitive, + epsilon: condition.epsilon, + ); + break; + case FilterConditionType.lessThan: + _buildConditionLessThan( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + val: value1, + include: condition.include1, + caseSensitive: condition.caseSensitive, + epsilon: condition.epsilon, + ); + break; + case FilterConditionType.greaterThan: + _buildConditionGreaterThan( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + val: value1, + include: condition.include1, + caseSensitive: condition.caseSensitive, + epsilon: condition.epsilon, + ); + break; + case FilterConditionType.startsWith: + case FilterConditionType.endsWith: + case FilterConditionType.contains: + case FilterConditionType.matches: + _buildConditionStringOp( + colPtr: col.ptr, + filterPtr: filterPtr, + conditionType: condition.type, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + val: value1, + include: condition.include1, + caseSensitive: condition.caseSensitive, + ); + break; + case FilterConditionType.isNull: + _buildConditionIsNull( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + ); + break; + case FilterConditionType.isNotNull: + _buildConditionIsNotNull( + 
colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + alloc: alloc, + ); + break; + case FilterConditionType.elementIsNull: + _buildConditionElementIsNull( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + isObjectList: property?.type == IsarType.objectList, + nullValue: value1, + ); + break; + case FilterConditionType.elementIsNotNull: + _buildConditionElementIsNotNull( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + isObjectList: property?.type == IsarType.objectList, + nullValue: value1, + alloc: alloc, + ); + break; + case FilterConditionType.listLength: + _buildListLength( + colPtr: col.ptr, + filterPtr: filterPtr, + embeddedColId: embeddedCol?.id, + propertyId: property?.id, + lower: value1, + upper: value2, + ); + break; + } + + return filterPtr.value; +} + +void _buildConditionIsNull({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, +}) { + if (propertyId != null) { + nCall( + IC.isar_filter_null( + colPtr, + filterPtr, + embeddedColId ?? 0, + propertyId, + ), + ); + } else { + IC.isar_filter_static(filterPtr, false); + } +} + +void _buildConditionIsNotNull({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, + required Allocator alloc, +}) { + if (propertyId != null) { + final conditionPtr = alloc>(); + nCall( + IC.isar_filter_null( + colPtr, + conditionPtr, + embeddedColId ?? 0, + propertyId, + ), + ); + IC.isar_filter_not(filterPtr, conditionPtr.value); + } else { + IC.isar_filter_static(filterPtr, true); + } +} + +void _buildConditionElementIsNull({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? 
propertyId, + required bool isObjectList, + required Object nullValue, +}) { + if (isObjectList) { + IC.isar_filter_object( + colPtr, + filterPtr, + nullptr, + embeddedColId ?? 0, + propertyId ?? 0, + ); + } else { + _buildConditionEqual( + colPtr: colPtr, + filterPtr: filterPtr, + embeddedColId: embeddedColId, + propertyId: propertyId, + val: nullValue, + epsilon: 0, + caseSensitive: true, + ); + } +} + +void _buildConditionElementIsNotNull({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, + required bool isObjectList, + required Object nullValue, + required Allocator alloc, +}) { + if (isObjectList) { + final objFilterPtrPtr = alloc>(); + IC.isar_filter_static(objFilterPtrPtr, true); + IC.isar_filter_object( + colPtr, + filterPtr, + objFilterPtrPtr.value, + embeddedColId ?? 0, + propertyId ?? 0, + ); + } else { + _buildConditionGreaterThan( + colPtr: colPtr, + filterPtr: filterPtr, + embeddedColId: embeddedColId, + propertyId: propertyId, + val: nullValue, + include: false, + epsilon: 0, + caseSensitive: true, + ); + } +} + +void _buildConditionEqual({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, + required Object val, + required bool caseSensitive, + required double epsilon, +}) { + if (val is int) { + if (propertyId == null) { + IC.isar_filter_id(filterPtr, val, true, val, true); + } else { + nCall( + IC.isar_filter_long( + colPtr, + filterPtr, + val, + true, + val, + true, + embeddedColId ?? 
0, + propertyId, + ), + ); + } + } else if (val is double) { + final lower = adjustFloatBound( + value: val, + lowerBound: true, + include: true, + epsilon: epsilon, + ); + final upper = adjustFloatBound( + value: val, + lowerBound: false, + include: true, + epsilon: epsilon, + ); + if (lower == null || upper == null) { + IC.isar_filter_static(filterPtr, false); + } else { + nCall( + IC.isar_filter_double( + colPtr, + filterPtr, + lower, + upper, + embeddedColId ?? 0, + propertyId!, + ), + ); + } + } else if (val is Pointer) { + nCall( + IC.isar_filter_string( + colPtr, + filterPtr, + val, + true, + val, + true, + caseSensitive, + embeddedColId ?? 0, + propertyId!, + ), + ); + } else { + throw IsarError('Unsupported type for condition'); + } +} + +void _buildConditionBetween({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, + required Object lower, + required bool includeLower, + required Object upper, + required bool includeUpper, + required bool caseSensitive, + required double epsilon, +}) { + if (lower is int && upper is int) { + if (propertyId == null) { + IC.isar_filter_id(filterPtr, lower, includeLower, upper, includeUpper); + } else { + nCall( + IC.isar_filter_long( + colPtr, + filterPtr, + lower, + includeLower, + upper, + includeUpper, + embeddedColId ?? 0, + propertyId, + ), + ); + } + } else if (lower is double && upper is double) { + final adjustedLower = adjustFloatBound( + value: lower, + lowerBound: true, + include: includeLower, + epsilon: epsilon, + ); + final adjustedUpper = adjustFloatBound( + value: upper, + lowerBound: false, + include: includeUpper, + epsilon: epsilon, + ); + if (adjustedLower == null || adjustedUpper == null) { + IC.isar_filter_static(filterPtr, false); + } else { + nCall( + IC.isar_filter_double( + colPtr, + filterPtr, + adjustedLower, + adjustedUpper, + embeddedColId ?? 
0, + propertyId!, + ), + ); + } + } else if (lower is Pointer && upper is Pointer) { + nCall( + IC.isar_filter_string( + colPtr, + filterPtr, + lower, + includeLower, + upper, + includeUpper, + caseSensitive, + embeddedColId ?? 0, + propertyId!, + ), + ); + } else { + throw IsarError('Unsupported type for condition'); + } +} + +void _buildConditionLessThan({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, + required Object val, + required bool include, + required bool caseSensitive, + required double epsilon, +}) { + if (val is int) { + if (propertyId == null) { + IC.isar_filter_id(filterPtr, minLong, true, val, include); + } else { + nCall( + IC.isar_filter_long( + colPtr, + filterPtr, + minLong, + true, + val, + include, + embeddedColId ?? 0, + propertyId, + ), + ); + } + } else if (val is double) { + final upper = adjustFloatBound( + value: val, + lowerBound: false, + include: include, + epsilon: epsilon, + ); + if (upper == null) { + IC.isar_filter_static(filterPtr, false); + } else { + nCall( + IC.isar_filter_double( + colPtr, + filterPtr, + minDouble, + upper, + embeddedColId ?? 0, + propertyId!, + ), + ); + } + } else if (val is Pointer) { + nCall( + IC.isar_filter_string( + colPtr, + filterPtr, + nullptr, + true, + val, + include, + caseSensitive, + embeddedColId ?? 0, + propertyId!, + ), + ); + } else { + throw IsarError('Unsupported type for condition'); + } +} + +void _buildConditionGreaterThan({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, + required Object val, + required bool include, + required bool caseSensitive, + required double epsilon, +}) { + if (val is int) { + if (propertyId == null) { + IC.isar_filter_id(filterPtr, val, include, maxLong, true); + } else { + nCall( + IC.isar_filter_long( + colPtr, + filterPtr, + val, + include, + maxLong, + true, + embeddedColId ?? 
0, + propertyId, + ), + ); + } + } else if (val is double) { + final lower = adjustFloatBound( + value: val, + lowerBound: true, + include: include, + epsilon: epsilon, + ); + if (lower == null) { + IC.isar_filter_static(filterPtr, false); + } else { + nCall( + IC.isar_filter_double( + colPtr, + filterPtr, + lower, + maxDouble, + embeddedColId ?? 0, + propertyId!, + ), + ); + } + } else if (val is Pointer) { + nCall( + IC.isar_filter_string( + colPtr, + filterPtr, + val, + include, + maxStr, + true, + caseSensitive, + embeddedColId ?? 0, + propertyId!, + ), + ); + } else { + throw IsarError('Unsupported type for condition'); + } +} + +void _buildConditionStringOp({ + required Pointer colPtr, + required Pointer> filterPtr, + required FilterConditionType conditionType, + required int? embeddedColId, + required int? propertyId, + required Object val, + required bool include, + required bool caseSensitive, +}) { + if (val is Pointer) { + if (val.isNull) { + throw IsarError('String operation value must not be null'); + } + + // ignore: missing_enum_constant_in_switch + switch (conditionType) { + case FilterConditionType.startsWith: + nCall( + IC.isar_filter_string_starts_with( + colPtr, + filterPtr, + val, + caseSensitive, + embeddedColId ?? 0, + propertyId!, + ), + ); + break; + case FilterConditionType.endsWith: + nCall( + IC.isar_filter_string_ends_with( + colPtr, + filterPtr, + val, + caseSensitive, + embeddedColId ?? 0, + propertyId!, + ), + ); + break; + case FilterConditionType.contains: + nCall( + IC.isar_filter_string_contains( + colPtr, + filterPtr, + val, + caseSensitive, + embeddedColId ?? 0, + propertyId!, + ), + ); + break; + case FilterConditionType.matches: + nCall( + IC.isar_filter_string_matches( + colPtr, + filterPtr, + val, + caseSensitive, + embeddedColId ?? 
0, + propertyId!, + ), + ); + break; + } + } else { + throw IsarError('Unsupported type for condition'); + } +} + +void _buildListLength({ + required Pointer colPtr, + required Pointer> filterPtr, + required int? embeddedColId, + required int? propertyId, + required Object? lower, + required Object? upper, +}) { + if (lower is int && upper is int) { + nCall( + IC.isar_filter_list_length( + colPtr, + filterPtr, + lower, + upper, + embeddedColId ?? 0, + propertyId!, + ), + ); + } else { + throw IsarError('Unsupported type for condition'); + } +} diff --git a/lib/src/native/query_impl.dart b/lib/src/native/query_impl.dart new file mode 100644 index 0000000..a5d2452 --- /dev/null +++ b/lib/src/native/query_impl.dart @@ -0,0 +1,261 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:async'; +import 'dart:ffi'; +import 'dart:isolate'; +import 'dart:typed_data'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/native/bindings.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/isar_collection_impl.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/txn.dart'; + +typedef QueryDeserialize = List Function(CObjectSet); + +class QueryImpl extends Query implements Finalizable { + QueryImpl(this.col, this.queryPtr, this.deserialize, this.propertyId) { + NativeFinalizer(isarQueryFree).attach(this, queryPtr.cast()); + } + static const int maxLimit = 4294967295; + + final IsarCollectionImpl col; + final Pointer queryPtr; + final QueryDeserialize deserialize; + final int? 
propertyId; + + @override + Isar get isar => col.isar; + + @override + Future findFirst() { + return findInternal(maxLimit).then((List result) { + if (result.isNotEmpty) { + return result[0]; + } else { + return null; + } + }); + } + + @override + Future> findAll() => findInternal(maxLimit); + + Future> findInternal(int limit) { + return col.isar.getTxn(false, (Txn txn) async { + final resultsPtr = txn.alloc(); + try { + IC.isar_q_find(queryPtr, txn.ptr, resultsPtr, limit); + await txn.wait(); + return deserialize(resultsPtr.ref).cast(); + } finally { + IC.isar_free_c_object_set(resultsPtr); + } + }); + } + + @override + T? findFirstSync() { + final results = findSyncInternal(1); + if (results.isNotEmpty) { + return results[0]; + } else { + return null; + } + } + + @override + List findAllSync() => findSyncInternal(maxLimit); + + List findSyncInternal(int limit) { + return col.isar.getTxnSync(false, (Txn txn) { + final resultsPtr = txn.getCObjectsSet(); + try { + nCall(IC.isar_q_find(queryPtr, txn.ptr, resultsPtr, limit)); + return deserialize(resultsPtr.ref).cast(); + } finally { + IC.isar_free_c_object_set(resultsPtr); + } + }); + } + + @override + Future deleteFirst() => + deleteInternal(1).then((int count) => count == 1); + + @override + Future deleteAll() => deleteInternal(maxLimit); + + Future deleteInternal(int limit) { + return col.isar.getTxn(false, (Txn txn) async { + final countPtr = txn.alloc(); + IC.isar_q_delete(queryPtr, col.ptr, txn.ptr, limit, countPtr); + await txn.wait(); + return countPtr.value; + }); + } + + @override + bool deleteFirstSync() => deleteSyncInternal(1) == 1; + + @override + int deleteAllSync() => deleteSyncInternal(maxLimit); + + int deleteSyncInternal(int limit) { + return col.isar.getTxnSync(false, (Txn txn) { + final countPtr = txn.alloc(); + nCall(IC.isar_q_delete(queryPtr, col.ptr, txn.ptr, limit, countPtr)); + return countPtr.value; + }); + } + + @override + Stream> watch({bool fireImmediately = false}) { + return 
watchLazy(fireImmediately: fireImmediately) + .asyncMap((event) => findAll()); + } + + @override + Stream watchLazy({bool fireImmediately = false}) { + final port = ReceivePort(); + final handle = IC.isar_watch_query( + col.isar.ptr, + col.ptr, + queryPtr, + port.sendPort.nativePort, + ); + + final controller = StreamController( + onCancel: () { + IC.isar_stop_watching(handle); + port.close(); + }, + ); + + if (fireImmediately) { + controller.add(null); + } + + controller.addStream(port); + return controller.stream; + } + + @override + Future exportJsonRaw(R Function(Uint8List) callback) { + return col.isar.getTxn(false, (Txn txn) async { + final bytesPtrPtr = txn.alloc>(); + final lengthPtr = txn.alloc(); + final idNamePtr = col.schema.idName.toCString(txn.alloc); + + nCall( + IC.isar_q_export_json( + queryPtr, + col.ptr, + txn.ptr, + idNamePtr, + bytesPtrPtr, + lengthPtr, + ), + ); + + try { + await txn.wait(); + final bytes = bytesPtrPtr.value.asTypedList(lengthPtr.value); + return callback(bytes); + } finally { + IC.isar_free_json(bytesPtrPtr.value, lengthPtr.value); + } + }); + } + + @override + R exportJsonRawSync(R Function(Uint8List) callback) { + return col.isar.getTxnSync(false, (Txn txn) { + final bytesPtrPtr = txn.alloc>(); + final lengthPtr = txn.alloc(); + final idNamePtr = col.schema.idName.toCString(txn.alloc); + + try { + nCall( + IC.isar_q_export_json( + queryPtr, + col.ptr, + txn.ptr, + idNamePtr, + bytesPtrPtr, + lengthPtr, + ), + ); + final bytes = bytesPtrPtr.value.asTypedList(lengthPtr.value); + return callback(bytes); + } finally { + IC.isar_free_json(bytesPtrPtr.value, lengthPtr.value); + } + }); + } + + @override + Future aggregate(AggregationOp op) async { + return col.isar.getTxn(false, (Txn txn) async { + final resultPtrPtr = txn.alloc>(); + + IC.isar_q_aggregate( + col.ptr, + queryPtr, + txn.ptr, + op.index, + propertyId ?? 
0, + resultPtrPtr, + ); + await txn.wait(); + + return _convertAggregatedResult(resultPtrPtr.value, op); + }); + } + + @override + R? aggregateSync(AggregationOp op) { + return col.isar.getTxnSync(false, (Txn txn) { + final resultPtrPtr = txn.alloc>(); + + nCall( + IC.isar_q_aggregate( + col.ptr, + queryPtr, + txn.ptr, + op.index, + propertyId ?? 0, + resultPtrPtr, + ), + ); + return _convertAggregatedResult(resultPtrPtr.value, op); + }); + } + + R? _convertAggregatedResult( + Pointer resultPtr, + AggregationOp op, + ) { + final nullable = op == AggregationOp.min || op == AggregationOp.max; + if (R == int || R == DateTime) { + final value = IC.isar_q_aggregate_long_result(resultPtr); + if (nullable && value == nullLong) { + return null; + } + if (R == int) { + return value as R; + } else { + return DateTime.fromMicrosecondsSinceEpoch(value, isUtc: true).toLocal() + as R; + } + } else { + final value = IC.isar_q_aggregate_double_result(resultPtr); + if (nullable && value.isNaN) { + return null; + } else { + return value as R; + } + } + } +} diff --git a/lib/src/native/split_words.dart b/lib/src/native/split_words.dart new file mode 100644 index 0000000..6a887fb --- /dev/null +++ b/lib/src/native/split_words.dart @@ -0,0 +1,33 @@ +import 'dart:ffi'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/src/native/encode_string.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/isar_reader_impl.dart'; + +// ignore: public_member_api_docs +List isarSplitWords(String input) { + initializeCoreBinary(); + + final bytesPtr = malloc(input.length * 3); + final bytes = bytesPtr.asTypedList(input.length * 3); + final byteCount = encodeString(input, bytes, 0); + + final wordCountPtr = malloc(); + final boundariesPtr = + IC.isar_find_word_boundaries(bytesPtr.cast(), byteCount, wordCountPtr); + final wordCount = wordCountPtr.value; + final boundaries = boundariesPtr.asTypedList(wordCount * 2); + + final words = []; + for (var i = 0; i < 
wordCount * 2; i++) { + final wordBytes = bytes.sublist(boundaries[i++], boundaries[i]); + words.add(IsarReaderImpl.utf8Decoder.convert(wordBytes)); + } + + IC.isar_free_word_boundaries(boundariesPtr, wordCount); + malloc.free(bytesPtr); + malloc.free(wordCountPtr); + + return words; +} diff --git a/lib/src/native/txn.dart b/lib/src/native/txn.dart new file mode 100644 index 0000000..6017aef --- /dev/null +++ b/lib/src/native/txn.dart @@ -0,0 +1,113 @@ +import 'dart:async'; +import 'dart:collection'; +import 'dart:ffi'; + +import 'package:ffi/ffi.dart'; +import 'package:isar/isar.dart'; +import 'package:isar/src/common/isar_common.dart'; +import 'package:isar/src/native/bindings.dart'; +import 'package:isar/src/native/isar_core.dart'; + +/// @nodoc +class Txn extends Transaction { + /// @nodoc + Txn.sync(Isar isar, this.ptr, bool write) : super(isar, true, write); + + /// @nodoc + Txn.async(Isar isar, this.ptr, bool write, Stream stream) + : super(isar, false, write) { + _completers = Queue(); + _portSubscription = stream.listen( + (_) => _completers.removeFirst().complete(), + onError: (Object e) => _completers.removeFirst().completeError(e), + ); + } + + @override + bool active = true; + + /// An arena allocator that has the same lifetime as this transaction. + final alloc = Arena(malloc); + + /// The pointer to the native transaction. + final Pointer ptr; + Pointer? _cObjPtr; + Pointer? _cObjSetPtr; + + late Pointer _buffer; + int _bufferLen = -1; + + late final Queue> _completers; + late final StreamSubscription? _portSubscription; + + /// Get a shared CObject pointer + Pointer getCObject() { + _cObjPtr ??= alloc(); + return _cObjPtr!; + } + + /// Get a shared CObjectSet pointer + Pointer getCObjectsSet() { + _cObjSetPtr ??= alloc(); + return _cObjSetPtr!; + } + + /// Allocate a new CObjectSet with the given capacity. 
+ Pointer newCObjectSet(int length) { + final cObjSetPtr = alloc(); + cObjSetPtr.ref + ..objects = alloc(length) + ..length = length; + return cObjSetPtr; + } + + /// Get a shared buffer with at least the specified size. + Pointer getBuffer(int size) { + if (_bufferLen < size) { + final allocSize = (size * 1.3).toInt(); + _buffer = alloc(allocSize); + _bufferLen = allocSize; + } + return _buffer; + } + + /// Wait for the latest async operation to complete. + Future wait() { + final completer = Completer(); + _completers.add(completer); + return completer.future; + } + + @override + Future commit() async { + active = false; + IC.isar_txn_finish(ptr, true); + await wait(); + unawaited(_portSubscription!.cancel()); + } + + @override + void commitSync() { + active = false; + nCall(IC.isar_txn_finish(ptr, true)); + } + + @override + Future abort() async { + active = false; + IC.isar_txn_finish(ptr, false); + await wait(); + unawaited(_portSubscription!.cancel()); + } + + @override + void abortSync() { + active = false; + nCall(IC.isar_txn_finish(ptr, false)); + } + + @override + void free() { + alloc.releaseAll(); + } +} diff --git a/lib/src/query.dart b/lib/src/query.dart new file mode 100644 index 0000000..89c7bd7 --- /dev/null +++ b/lib/src/query.dart @@ -0,0 +1,204 @@ +part of isar; + +/// Querying is how you find records that match certain conditions. +abstract class Query { + /// The default precision for floating point number queries. + static const double epsilon = 0.00001; + + /// The corresponding Isar instance. + Isar get isar; + + /// {@template query_find_first} + /// Find the first object that matches this query or `null` if no object + /// matches. + /// {@endtemplate} + Future findFirst(); + + /// {@macro query_find_first} + T? findFirstSync(); + + /// {@template query_find_all} + /// Find all objects that match this query. 
+ /// {@endtemplate} + Future> findAll(); + + /// {@macro query_find_all} + List findAllSync(); + + /// @nodoc + @protected + Future aggregate(AggregationOp op); + + /// @nodoc + @protected + R? aggregateSync(AggregationOp op); + + /// {@template query_count} + /// Count how many objects match this query. + /// + /// This operation is much faster than using `findAll().length`. + /// {@endtemplate} + Future count() => + aggregate(AggregationOp.count).then((int? value) => value!); + + /// {@macro query_count} + int countSync() => aggregateSync(AggregationOp.count)!; + + /// {@template query_is_empty} + /// Returns `true` if there are no objects that match this query. + /// + /// This operation is faster than using `count() == 0`. + /// {@endtemplate} + Future isEmpty() => + aggregate(AggregationOp.isEmpty).then((value) => value == 1); + + /// {@macro query_is_empty} + bool isEmptySync() => aggregateSync(AggregationOp.isEmpty) == 1; + + /// {@template query_is_not_empty} + /// Returns `true` if there are objects that match this query. + /// + /// This operation is faster than using `count() > 0`. + /// {@endtemplate} + Future isNotEmpty() => + aggregate(AggregationOp.isEmpty).then((value) => value == 0); + + /// {@macro query_is_not_empty} + bool isNotEmptySync() => aggregateSync(AggregationOp.isEmpty) == 0; + + /// {@template query_delete_first} + /// Delete the first object that matches this query. Returns whether a object + /// has been deleted. + /// {@endtemplate} + Future deleteFirst(); + + /// {@macro query_delete_first} + bool deleteFirstSync(); + + /// {@template query_delete_all} + /// Delete all objects that match this query. Returns the number of deleted + /// objects. + /// {@endtemplate} + Future deleteAll(); + + /// {@macro query_delete_all} + int deleteAllSync(); + + /// {@template query_watch} + /// Create a watcher that yields the results of this query whenever its + /// results have (potentially) changed. 
+ /// + /// If you don't always use the results, consider using `watchLazy` and rerun + /// the query manually. If [fireImmediately] is `true`, the results will be + /// sent to the consumer immediately. + /// {@endtemplate} + Stream> watch({bool fireImmediately = false}); + + /// {@template query_watch_lazy} + /// Watch the query for changes. If [fireImmediately] is `true`, an event will + /// be fired immediately. + /// {@endtemplate} + Stream watchLazy({bool fireImmediately = false}); + + /// {@template query_export_json_raw} + /// Export the results of this query as json bytes. + /// + /// **IMPORTANT:** Do not leak the bytes outside the callback. If you need to + /// use the bytes outside, create a copy of the `Uint8List`. + /// {@endtemplate} + Future exportJsonRaw(R Function(Uint8List) callback); + + /// {@macro query_export_json_raw} + R exportJsonRawSync(R Function(Uint8List) callback); + + /// {@template query_export_json} + /// Export the results of this query as json. + /// {@endtemplate} + Future>> exportJson() { + return exportJsonRaw((Uint8List bytes) { + final json = jsonDecode(const Utf8Decoder().convert(bytes)) as List; + return json.cast>(); + }); + } + + /// {@macro query_export_json} + List> exportJsonSync() { + return exportJsonRawSync((Uint8List bytes) { + final json = jsonDecode(const Utf8Decoder().convert(bytes)) as List; + return json.cast>(); + }); + } +} + +/// @nodoc +@protected +enum AggregationOp { + /// Finds the smallest value. + min, + + /// Finds the largest value. + max, + + /// Calculates the sum of all values. + sum, + + /// Calculates the average of all values. + average, + + /// Counts all values. + count, + + /// Returns `true` if the query has no results. + isEmpty, +} + +/// Extension for Queries +extension QueryAggregation on Query { + /// {@template aggregation_min} + /// Returns the minimum value of this query. + /// {@endtemplate} + Future min() => aggregate(AggregationOp.min); + + /// {@macro aggregation_min} + T? 
minSync() => aggregateSync(AggregationOp.min); + + /// {@template aggregation_max} + /// Returns the maximum value of this query. + /// {@endtemplate} + Future max() => aggregate(AggregationOp.max); + + /// {@macro aggregation_max} + T? maxSync() => aggregateSync(AggregationOp.max); + + /// {@template aggregation_average} + /// Returns the average value of this query. + /// {@endtemplate} + Future average() => + aggregate(AggregationOp.average).then((double? value) => value!); + + /// {@macro aggregation_average} + double averageSync() => aggregateSync(AggregationOp.average)!; + + /// {@template aggregation_sum} + /// Returns the sum of all values of this query. + /// {@endtemplate} + Future sum() => aggregate(AggregationOp.sum).then((value) => value!); + + /// {@macro aggregation_sum} + T sumSync() => aggregateSync(AggregationOp.sum)!; +} + +/// Extension for Queries +extension QueryDateAggregation on Query { + /// {@macro aggregation_min} + Future min() => aggregate(AggregationOp.min); + + /// {@macro aggregation_min} + DateTime? minSync() => aggregateSync(AggregationOp.min); + + /// {@macro aggregation_max} + Future max() => aggregate(AggregationOp.max); + + /// {@macro aggregation_max} + DateTime? maxSync() => aggregateSync(AggregationOp.max); +} diff --git a/lib/src/query_builder.dart b/lib/src/query_builder.dart new file mode 100644 index 0000000..a29e6d2 --- /dev/null +++ b/lib/src/query_builder.dart @@ -0,0 +1,403 @@ +part of isar; + +/// @nodoc +@protected +typedef FilterQuery = QueryBuilder + Function(QueryBuilder q); + +/// Query builders are used to create queries in a safe way. +/// +/// Acquire a `QueryBuilder` instance using `collection.where()` or +/// `collection.filter()`. 
+class QueryBuilder { + /// @nodoc + @protected + const QueryBuilder(this._query); + + final QueryBuilderInternal _query; + + /// @nodoc + @protected + static QueryBuilder apply( + QueryBuilder qb, + QueryBuilderInternal Function(QueryBuilderInternal query) + transform, + ) { + return QueryBuilder(transform(qb._query)); + } +} + +/// @nodoc +@protected +class QueryBuilderInternal { + /// @nodoc + const QueryBuilderInternal({ + this.collection, + this.whereClauses = const [], + this.whereDistinct = false, + this.whereSort = Sort.asc, + this.filter = const FilterGroup.and([]), + this.filterGroupType = FilterGroupType.and, + this.filterNot = false, + this.distinctByProperties = const [], + this.sortByProperties = const [], + this.offset, + this.limit, + this.propertyName, + }); + + /// @nodoc + final IsarCollection? collection; + + /// @nodoc + final List whereClauses; + + /// @nodoc + final bool whereDistinct; + + /// @nodoc + final Sort whereSort; + + /// @nodoc + final FilterGroup filter; + + /// @nodoc + final FilterGroupType filterGroupType; + + /// @nodoc + final bool filterNot; + + /// @nodoc + final List distinctByProperties; + + /// @nodoc + final List sortByProperties; + + /// @nodoc + final int? offset; + + /// @nodoc + final int? limit; + + /// @nodoc + final String? 
propertyName; + + /// @nodoc + QueryBuilderInternal addFilterCondition(FilterOperation cond) { + if (filterNot) { + cond = FilterGroup.not(cond); + } + + late FilterGroup filterGroup; + + if (filter.type == filterGroupType || filter.filters.length <= 1) { + filterGroup = FilterGroup( + type: filterGroupType, + filters: [...filter.filters, cond], + ); + } else if (filterGroupType == FilterGroupType.and) { + filterGroup = FilterGroup( + type: filter.type, + filters: [ + ...filter.filters.sublist(0, filter.filters.length - 1), + FilterGroup( + type: filterGroupType, + filters: [filter.filters.last, cond], + ), + ], + ); + } else { + filterGroup = FilterGroup( + type: filterGroupType, + filters: [filter, cond], + ); + } + + return copyWith( + filter: filterGroup, + filterGroupType: FilterGroupType.and, + filterNot: false, + ); + } + + /// @nodoc + QueryBuilderInternal addWhereClause(WhereClause where) { + return copyWith(whereClauses: [...whereClauses, where]); + } + + /// @nodoc + QueryBuilderInternal group(FilterQuery q) { + // ignore: prefer_const_constructors + final qb = q(QueryBuilder(QueryBuilderInternal())); + return addFilterCondition(qb._query.filter); + } + + /// @nodoc + QueryBuilderInternal listLength( + String property, + int lower, + bool includeLower, + int upper, + bool includeUpper, + ) { + if (!includeLower) { + lower += 1; + } + if (!includeUpper) { + if (upper == 0) { + lower = 1; + } else { + upper -= 1; + } + } + return addFilterCondition( + FilterCondition.listLength( + property: property, + lower: lower, + upper: upper, + ), + ); + } + + /// @nodoc + QueryBuilderInternal object( + FilterQuery q, + String property, + ) { + // ignore: prefer_const_constructors + final qb = q(QueryBuilder(QueryBuilderInternal())); + return addFilterCondition( + ObjectFilter(filter: qb._query.filter, property: property), + ); + } + + /// @nodoc + QueryBuilderInternal link( + FilterQuery q, + String linkName, + ) { + // ignore: prefer_const_constructors + final qb = 
q(QueryBuilder(QueryBuilderInternal())); + return addFilterCondition( + LinkFilter(filter: qb._query.filter, linkName: linkName), + ); + } + + /// @nodoc + QueryBuilderInternal linkLength( + String linkName, + int lower, + bool includeLower, + int upper, + bool includeUpper, + ) { + if (!includeLower) { + lower += 1; + } + if (!includeUpper) { + if (upper == 0) { + lower = 1; + } else { + upper -= 1; + } + } + return addFilterCondition( + LinkFilter.length( + lower: lower, + upper: upper, + linkName: linkName, + ), + ); + } + + /// @nodoc + QueryBuilderInternal addSortBy(String propertyName, Sort sort) { + return copyWith( + sortByProperties: [ + ...sortByProperties, + SortProperty(property: propertyName, sort: sort), + ], + ); + } + + /// @nodoc + QueryBuilderInternal addDistinctBy( + String propertyName, { + bool? caseSensitive, + }) { + return copyWith( + distinctByProperties: [ + ...distinctByProperties, + DistinctProperty( + property: propertyName, + caseSensitive: caseSensitive, + ), + ], + ); + } + + /// @nodoc + QueryBuilderInternal addPropertyName(String propertyName) { + return copyWith(propertyName: propertyName); + } + + /// @nodoc + QueryBuilderInternal copyWith({ + List? whereClauses, + FilterGroup? filter, + bool? filterIsGrouped, + FilterGroupType? filterGroupType, + bool? filterNot, + List? parentFilters, + List? distinctByProperties, + List? sortByProperties, + int? offset, + int? limit, + String? propertyName, + }) { + assert(offset == null || offset >= 0, 'Invalid offset'); + assert(limit == null || limit >= 0, 'Invalid limit'); + return QueryBuilderInternal( + collection: collection, + whereClauses: whereClauses ?? List.unmodifiable(this.whereClauses), + whereDistinct: whereDistinct, + whereSort: whereSort, + filter: filter ?? this.filter, + filterGroupType: filterGroupType ?? this.filterGroupType, + filterNot: filterNot ?? this.filterNot, + distinctByProperties: + distinctByProperties ?? 
List.unmodifiable(this.distinctByProperties), + sortByProperties: + sortByProperties ?? List.unmodifiable(this.sortByProperties), + offset: offset ?? this.offset, + limit: limit ?? this.limit, + propertyName: propertyName ?? this.propertyName, + ); + } + + /// @nodoc + @protected + Query build() { + return collection!.buildQuery( + whereDistinct: whereDistinct, + whereSort: whereSort, + whereClauses: whereClauses, + filter: filter, + sortBy: sortByProperties, + distinctBy: distinctByProperties, + offset: offset, + limit: limit, + property: propertyName, + ); + } +} + +/// @nodoc +/// +/// Right after query starts +@protected +class QWhere + implements + QWhereClause, + QFilter, + QSortBy, + QDistinct, + QOffset, + QLimit, + QQueryProperty {} + +/// @nodoc +/// +/// No more where conditions are allowed +@protected +class QAfterWhere + implements QFilter, QSortBy, QDistinct, QOffset, QLimit, QQueryProperty {} + +/// @nodoc +@protected +class QWhereClause {} + +/// @nodoc +@protected +class QAfterWhereClause + implements + QWhereOr, + QFilter, + QSortBy, + QDistinct, + QOffset, + QLimit, + QQueryProperty {} + +/// @nodoc +@protected +class QWhereOr {} + +/// @nodoc +@protected +class QFilter {} + +/// @nodoc +@protected +class QFilterCondition {} + +/// @nodoc +@protected +class QAfterFilterCondition + implements + QFilterCondition, + QFilterOperator, + QSortBy, + QDistinct, + QOffset, + QLimit, + QQueryProperty {} + +/// @nodoc +@protected +class QFilterOperator {} + +/// @nodoc +@protected +class QAfterFilterOperator implements QFilterCondition {} + +/// @nodoc +@protected +class QSortBy {} + +/// @nodoc +@protected +class QAfterSortBy + implements QSortThenBy, QDistinct, QOffset, QLimit, QQueryProperty {} + +/// @nodoc +@protected +class QSortThenBy {} + +/// @nodoc +@protected +class QDistinct implements QOffset, QLimit, QQueryProperty {} + +/// @nodoc +@protected +class QOffset {} + +/// @nodoc +@protected +class QAfterOffset implements QLimit, QQueryProperty {} 
+ +/// @nodoc +@protected +class QLimit {} + +/// @nodoc +@protected +class QAfterLimit implements QQueryProperty {} + +/// @nodoc +@protected +class QQueryProperty implements QQueryOperations {} + +/// @nodoc +@protected +class QQueryOperations {} diff --git a/lib/src/query_builder_extensions.dart b/lib/src/query_builder_extensions.dart new file mode 100644 index 0000000..70be3c0 --- /dev/null +++ b/lib/src/query_builder_extensions.dart @@ -0,0 +1,303 @@ +part of isar; + +/// Extension for QueryBuilders. +extension QueryWhereOr on QueryBuilder { + /// Union of two where clauses. + QueryBuilder or() { + return QueryBuilder(_query); + } +} + +/// @nodoc +@protected +typedef WhereRepeatModifier = QueryBuilder + Function(QueryBuilder q, E element); + +/// Extension for QueryBuilders. +extension QueryWhere on QueryBuilder { + /// Joins the results of the [modifier] for each item in [items] using logical + /// OR. So an object will be included if it matches at least one of the + /// resulting where clauses. + /// + /// If [items] is empty, this is a no-op. + QueryBuilder anyOf( + Iterable items, + WhereRepeatModifier modifier, + ) { + QueryBuilder? q; + for (final e in items) { + q = modifier(q?.or() ?? QueryBuilder(_query), e); + } + return q ?? QueryBuilder(_query); + } +} + +/// Extension for QueryBuilders. +extension QueryFilters on QueryBuilder { + /// Start using filter conditions. + QueryBuilder filter() { + return QueryBuilder(_query); + } +} + +/// @nodoc +@protected +typedef FilterRepeatModifier + = QueryBuilder Function( + QueryBuilder q, + E element, +); + +/// Extension for QueryBuilders. +extension QueryFilterAndOr on QueryBuilder { + /// Intersection of two filter conditions. + QueryBuilder and() { + return QueryBuilder.apply( + this, + (q) => q.copyWith(filterGroupType: FilterGroupType.and), + ); + } + + /// Union of two filter conditions. 
+ QueryBuilder or() { + return QueryBuilder.apply( + this, + (q) => q.copyWith(filterGroupType: FilterGroupType.or), + ); + } + + /// Logical XOR of two filter conditions. + QueryBuilder xor() { + return QueryBuilder.apply( + this, + (q) => q.copyWith(filterGroupType: FilterGroupType.xor), + ); + } +} + +/// Extension for QueryBuilders. +extension QueryFilterNot on QueryBuilder { + /// Complement the next filter condition or group. + QueryBuilder not() { + return QueryBuilder.apply( + this, + (q) => q.copyWith(filterNot: !q.filterNot), + ); + } + + /// Joins the results of the [modifier] for each item in [items] using logical + /// OR. So an object will be included if it matches at least one of the + /// resulting filters. + /// + /// If [items] is empty, this is a no-op. + QueryBuilder anyOf( + Iterable items, + FilterRepeatModifier modifier, + ) { + return QueryBuilder.apply(this, (query) { + return query.group((q) { + var q2 = QueryBuilder(q._query); + for (final e in items) { + q2 = modifier(q2.or(), e); + } + return q2; + }); + }); + } + + /// Joins the results of the [modifier] for each item in [items] using logical + /// AND. So an object will be included if it matches all of the resulting + /// filters. + /// + /// If [items] is empty, this is a no-op. + QueryBuilder allOf( + Iterable items, + FilterRepeatModifier modifier, + ) { + return QueryBuilder.apply(this, (query) { + return query.group((q) { + var q2 = QueryBuilder(q._query); + for (final e in items) { + q2 = modifier(q2.and(), e); + } + return q2; + }); + }); + } + + /// Joins the results of the [modifier] for each item in [items] using logical + /// XOR. So an object will be included if it matches exactly one of the + /// resulting filters. + /// + /// If [items] is empty, this is a no-op. + QueryBuilder oneOf( + Iterable items, + FilterRepeatModifier modifier, + ) { + QueryBuilder? q; + for (final e in items) { + q = modifier(q?.xor() ?? QueryBuilder(_query), e); + } + return q ?? 
QueryBuilder(_query); + } +} + +/// Extension for QueryBuilders. +extension QueryFilterNoGroups + on QueryBuilder { + /// Group filter conditions. + QueryBuilder group(FilterQuery q) { + return QueryBuilder.apply(this, (query) => query.group(q)); + } +} + +/// Extension for QueryBuilders. +extension QueryOffset on QueryBuilder { + /// Offset the query results by a static number. + QueryBuilder offset(int offset) { + return QueryBuilder.apply(this, (q) => q.copyWith(offset: offset)); + } +} + +/// Extension for QueryBuilders. +extension QueryLimit on QueryBuilder { + /// Limit the maximum number of query results. + QueryBuilder limit(int limit) { + return QueryBuilder.apply(this, (q) => q.copyWith(limit: limit)); + } +} + +/// @nodoc +@protected +typedef QueryOption = QueryBuilder Function( + QueryBuilder q, +); + +/// Extension for QueryBuilders. +extension QueryModifier on QueryBuilder { + /// Only apply a part of the query if `enabled` is true. + QueryBuilder optional( + bool enabled, + QueryOption option, + ) { + if (enabled) { + return option(this); + } else { + return QueryBuilder(_query); + } + } +} + +/// Extension for QueryBuilders +extension QueryExecute on QueryBuilder { + /// Create a query from this query builder. + Query build() => _query.build(); + + /// {@macro query_find_first} + Future findFirst() => build().findFirst(); + + /// {@macro query_find_first} + R? 
findFirstSync() => build().findFirstSync(); + + /// {@macro query_find_all} + Future> findAll() => build().findAll(); + + /// {@macro query_find_all} + List findAllSync() => build().findAllSync(); + + /// {@macro query_count} + Future count() => build().count(); + + /// {@macro query_count} + int countSync() => build().countSync(); + + /// {@macro query_is_empty} + Future isEmpty() => build().isEmpty(); + + /// {@macro query_is_empty} + bool isEmptySync() => build().isEmptySync(); + + /// {@macro query_is_not_empty} + Future isNotEmpty() => build().isNotEmpty(); + + /// {@macro query_is_not_empty} + bool isNotEmptySync() => build().isNotEmptySync(); + + /// {@macro query_delete_first} + Future deleteFirst() => build().deleteFirst(); + + /// {@macro query_delete_first} + bool deleteFirstSync() => build().deleteFirstSync(); + + /// {@macro query_delete_all} + Future deleteAll() => build().deleteAll(); + + /// {@macro query_delete_all} + int deleteAllSync() => build().deleteAllSync(); + + /// {@macro query_watch} + Stream> watch({bool fireImmediately = false}) => + build().watch(fireImmediately: fireImmediately); + + /// {@macro query_watch_lazy} + Stream watchLazy({bool fireImmediately = false}) => + build().watchLazy(fireImmediately: fireImmediately); + + /// {@macro query_export_json_raw} + Future exportJsonRaw(T Function(Uint8List) callback) => + build().exportJsonRaw(callback); + + /// {@macro query_export_json_raw} + T exportJsonRawSync(T Function(Uint8List) callback) => + build().exportJsonRawSync(callback); + + /// {@macro query_export_json} + Future>> exportJson() => build().exportJson(); + + /// {@macro query_export_json} + List> exportJsonSync() => build().exportJsonSync(); +} + +/// Extension for QueryBuilders +extension QueryExecuteAggregation + on QueryBuilder { + /// {@macro aggregation_min} + Future min() => build().min(); + + /// {@macro aggregation_min} + T? 
minSync() => build().minSync(); + + /// {@macro aggregation_max} + Future max() => build().max(); + + /// {@macro aggregation_max} + T? maxSync() => build().maxSync(); + + /// {@macro aggregation_average} + Future average() => build().average(); + + /// {@macro aggregation_average} + double averageSync() => build().averageSync(); + + /// {@macro aggregation_sum} + Future sum() => build().sum(); + + /// {@macro aggregation_sum} + T sumSync() => build().sumSync(); +} + +/// Extension for QueryBuilders +extension QueryExecuteDateAggregation + on QueryBuilder { + /// {@macro aggregation_min} + Future min() => build().min(); + + /// {@macro aggregation_min} + DateTime? minSync() => build().minSync(); + + /// {@macro aggregation_max} + Future max() => build().max(); + + /// {@macro aggregation_max} + DateTime? maxSync() => build().maxSync(); +} diff --git a/lib/src/query_components.dart b/lib/src/query_components.dart new file mode 100644 index 0000000..a2bbfdb --- /dev/null +++ b/lib/src/query_components.dart @@ -0,0 +1,597 @@ +part of isar; + +/// A where clause to traverse an Isar index. +abstract class WhereClause { + const WhereClause._(); +} + +/// A where clause traversing the primary index (ids). +class IdWhereClause extends WhereClause { + /// Where clause that matches all ids. Useful to get sorted results. + const IdWhereClause.any() + : lower = null, + upper = null, + includeLower = true, + includeUpper = true, + super._(); + + /// Where clause that matches all id values greater than the given [lower] + /// bound. + const IdWhereClause.greaterThan({ + required Id this.lower, + this.includeLower = true, + }) : upper = null, + includeUpper = true, + super._(); + + /// Where clause that matches all id values less than the given [upper] + /// bound. + const IdWhereClause.lessThan({ + required Id this.upper, + this.includeUpper = true, + }) : lower = null, + includeLower = true, + super._(); + + /// Where clause that matches the id value equal to the given [value]. 
+ const IdWhereClause.equalTo({ + required Id value, + }) : lower = value, + upper = value, + includeLower = true, + includeUpper = true, + super._(); + + /// Where clause that matches all id values between the given [lower] and + /// [upper] bounds. + const IdWhereClause.between({ + this.lower, + this.includeLower = true, + this.upper, + this.includeUpper = true, + }) : super._(); + + /// The lower bound id or `null` for unbounded. + final Id? lower; + + /// Whether the lower bound should be included in the results. + final bool includeLower; + + /// The upper bound id or `null` for unbounded. + final Id? upper; + + /// Whether the upper bound should be included in the results. + final bool includeUpper; +} + +/// A where clause traversing an index. +class IndexWhereClause extends WhereClause { + /// Where clause that matches all index values. Useful to get sorted results. + const IndexWhereClause.any({required this.indexName}) + : lower = null, + upper = null, + includeLower = true, + includeUpper = true, + epsilon = Query.epsilon, + super._(); + + /// Where clause that matches all index values greater than the given [lower] + /// bound. + /// + /// For composite indexes, the first elements of the [lower] list are checked + /// for equality. + const IndexWhereClause.greaterThan({ + required this.indexName, + required IndexKey this.lower, + this.includeLower = true, + this.epsilon = Query.epsilon, + }) : upper = null, + includeUpper = true, + super._(); + + /// Where clause that matches all index values less than the given [upper] + /// bound. + /// + /// For composite indexes, the first elements of the [upper] list are checked + /// for equality. + const IndexWhereClause.lessThan({ + required this.indexName, + required IndexKey this.upper, + this.includeUpper = true, + this.epsilon = Query.epsilon, + }) : lower = null, + includeLower = true, + super._(); + + /// Where clause that matches all index values equal to the given [value]. 
+ const IndexWhereClause.equalTo({ + required this.indexName, + required IndexKey value, + this.epsilon = Query.epsilon, + }) : lower = value, + upper = value, + includeLower = true, + includeUpper = true, + super._(); + + /// Where clause that matches all index values between the given [lower] and + /// [upper] bounds. + /// + /// For composite indexes, the first elements of the [lower] and [upper] lists + /// are checked for equality. + const IndexWhereClause.between({ + required this.indexName, + required IndexKey this.lower, + this.includeLower = true, + required IndexKey this.upper, + this.includeUpper = true, + this.epsilon = Query.epsilon, + }) : super._(); + + /// The Isar name of the index to be used. + final String indexName; + + /// The lower bound of the where clause. + final IndexKey? lower; + + /// Whether the lower bound should be included in the results. Double values + /// are never included. + final bool includeLower; + + /// The upper bound of the where clause. + final IndexKey? upper; + + /// Whether the upper bound should be included in the results. Double values + /// are never included. + final bool includeUpper; + + /// The precision to use for floating point values. + final double epsilon; +} + +/// A where clause traversing objects linked to the specified object. +class LinkWhereClause extends WhereClause { + /// Create a where clause for the specified link. + const LinkWhereClause({ + required this.linkCollection, + required this.linkName, + required this.id, + }) : super._(); + + /// The name of the collection the link originates from. + final String linkCollection; + + /// The isar name of the link to be used. + final String linkName; + + /// The id of the source object. + final Id id; +} + +/// @nodoc +@protected +abstract class FilterOperation { + const FilterOperation._(); +} + +/// The type of dynamic filter conditions. +enum FilterConditionType { + /// Filter checking for equality. 
+ equalTo, + + /// Filter matching values greater than the bound. + greaterThan, + + /// Filter matching values smaller than the bound. + lessThan, + + /// Filter matching values between the bounds. + between, + + /// Filter matching String values starting with the prefix. + startsWith, + + /// Filter matching String values ending with the suffix. + endsWith, + + /// Filter matching String values containing the String. + contains, + + /// Filter matching String values matching the wildcard. + matches, + + /// Filter matching values that are `null`. + isNull, + + /// Filter matching values that are not `null`. + isNotNull, + + /// Filter matching lists that contain `null`. + elementIsNull, + + /// Filter matching lists that contain an element that is not `null`. + elementIsNotNull, + + /// Filter matching the length of a list. + listLength, +} + +/// Create a filter condition dynamically. +class FilterCondition extends FilterOperation { + /// @nodoc + @protected + const FilterCondition({ + required this.type, + required this.property, + this.value1, + this.value2, + required this.include1, + required this.include2, + required this.caseSensitive, + this.epsilon = Query.epsilon, + }) : super._(); + + /// Filters the results to only include objects where the property equals + /// [value]. + /// + /// For lists, at least one of the values in the list has to match. + const FilterCondition.equalTo({ + required this.property, + required Object? value, + this.caseSensitive = true, + this.epsilon = Query.epsilon, + }) : type = FilterConditionType.equalTo, + value1 = value, + include1 = true, + value2 = null, + include2 = false, + super._(); + + /// Filters the results to only include objects where the property is greater + /// than [value]. + /// + /// For lists, at least one of the values in the list has to match. + const FilterCondition.greaterThan({ + required this.property, + required Object? 
value, + bool include = false, + this.caseSensitive = true, + this.epsilon = Query.epsilon, + }) : type = FilterConditionType.greaterThan, + value1 = value, + include1 = include, + value2 = null, + include2 = false, + super._(); + + /// Filters the results to only include objects where the property is less + /// than [value]. + /// + /// For lists, at least one of the values in the list has to match. + const FilterCondition.lessThan({ + required this.property, + required Object? value, + bool include = false, + this.caseSensitive = true, + this.epsilon = Query.epsilon, + }) : type = FilterConditionType.lessThan, + value1 = value, + include1 = include, + value2 = null, + include2 = false, + super._(); + + /// Filters the results to only include objects where the property is + /// between [lower] and [upper]. + /// + /// For lists, at least one of the values in the list has to match. + const FilterCondition.between({ + required this.property, + Object? lower, + bool includeLower = true, + Object? upper, + bool includeUpper = true, + this.caseSensitive = true, + this.epsilon = Query.epsilon, + }) : value1 = lower, + include1 = includeLower, + value2 = upper, + include2 = includeUpper, + type = FilterConditionType.between, + super._(); + + /// Filters the results to only include objects where the property starts + /// with [value]. + /// + /// For String lists, at least one of the values in the list has to match. + const FilterCondition.startsWith({ + required this.property, + required String value, + this.caseSensitive = true, + }) : type = FilterConditionType.startsWith, + value1 = value, + include1 = true, + value2 = null, + include2 = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include objects where the property ends with + /// [value]. + /// + /// For String lists, at least one of the values in the list has to match. 
+ const FilterCondition.endsWith({ + required this.property, + required String value, + this.caseSensitive = true, + }) : type = FilterConditionType.endsWith, + value1 = value, + include1 = true, + value2 = null, + include2 = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include objects where the String property + /// contains [value]. + /// + /// For String lists, at least one of the values in the list has to match. + const FilterCondition.contains({ + required this.property, + required String value, + this.caseSensitive = true, + }) : type = FilterConditionType.contains, + value1 = value, + include1 = true, + value2 = null, + include2 = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include objects where the property matches + /// the [wildcard]. + /// + /// For String lists, at least one of the values in the list has to match. + const FilterCondition.matches({ + required this.property, + required String wildcard, + this.caseSensitive = true, + }) : type = FilterConditionType.matches, + value1 = wildcard, + include1 = true, + value2 = null, + include2 = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include objects where the property is null. + const FilterCondition.isNull({ + required this.property, + }) : type = FilterConditionType.isNull, + value1 = null, + include1 = false, + value2 = null, + include2 = false, + caseSensitive = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include objects where the property is not + /// null. + const FilterCondition.isNotNull({ + required this.property, + }) : type = FilterConditionType.isNotNull, + value1 = null, + include1 = false, + value2 = null, + include2 = false, + caseSensitive = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include lists that contain `null`. 
+ const FilterCondition.elementIsNull({ + required this.property, + }) : type = FilterConditionType.elementIsNull, + value1 = null, + include1 = false, + value2 = null, + include2 = false, + caseSensitive = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include lists that do not contain `null`. + const FilterCondition.elementIsNotNull({ + required this.property, + }) : type = FilterConditionType.elementIsNotNull, + value1 = null, + include1 = false, + value2 = null, + include2 = false, + caseSensitive = false, + epsilon = Query.epsilon, + super._(); + + /// Filters the results to only include objects where the length of + /// [property] is between [lower] (included) and [upper] (included). + /// + /// Only list properties are supported. + const FilterCondition.listLength({ + required this.property, + required int lower, + required int upper, + }) : type = FilterConditionType.listLength, + value1 = lower, + include1 = true, + value2 = upper, + include2 = true, + caseSensitive = false, + epsilon = Query.epsilon, + assert(lower >= 0 && upper >= 0, 'List length must be positive.'), + super._(); + + /// Type of the filter condition. + final FilterConditionType type; + + /// Property used for comparisons. + final String property; + + /// Value used for comparisons. Lower bound for `ConditionType.between`. + final Object? value1; + + /// Should `value1` be part of the results. + final bool include1; + + /// Upper bound for `ConditionType.between`. + final Object? value2; + + /// Should `value1` be part of the results. + final bool include2; + + /// Are string operations case sensitive. + final bool caseSensitive; + + /// The precision to use for floating point values. + final double epsilon; +} + +/// The type of filter groups. +enum FilterGroupType { + /// Logical AND. + and, + + /// Logical OR. + or, + + /// Logical XOR. + xor, + + /// Logical NOT. + not, +} + +/// Group one or more filter conditions. 
+class FilterGroup extends FilterOperation { + /// @nodoc + @protected + FilterGroup({ + required this.type, + required this.filters, + }) : super._(); + + /// Create a logical AND filter group. + /// + /// Matches when all [filters] match. + const FilterGroup.and(this.filters) + : type = FilterGroupType.and, + super._(); + + /// Create a logical OR filter group. + /// + /// Matches when any of the [filters] matches. + const FilterGroup.or(this.filters) + : type = FilterGroupType.or, + super._(); + + /// Create a logical XOR filter group. + /// + /// Matches when exactly one of the [filters] matches. + const FilterGroup.xor(this.filters) + : type = FilterGroupType.xor, + super._(); + + /// Negate a filter. + /// + /// Matches when any of the [filter] doesn't matches. + FilterGroup.not(FilterOperation filter) + : filters = [filter], + type = FilterGroupType.not, + super._(); + + /// Type of this group. + final FilterGroupType type; + + /// The filter(s) to be grouped. + final List filters; +} + +/// Sort order +enum Sort { + /// Ascending sort order. + asc, + + /// Descending sort order. + desc, +} + +/// Property used to sort query results. +class SortProperty { + /// Create a sort property. + const SortProperty({required this.property, required this.sort}); + + /// Isar name of the property used for sorting. + final String property; + + /// Sort order. + final Sort sort; +} + +/// Property used to filter duplicate values. +class DistinctProperty { + /// Create a distinct property. + const DistinctProperty({required this.property, this.caseSensitive}); + + /// Isar name of the property used for sorting. + final String property; + + /// Should Strings be case sensitive? + final bool? caseSensitive; +} + +/// Filter condition based on an embedded object. +class ObjectFilter extends FilterOperation { + /// Create a filter condition based on an embedded object. 
+ const ObjectFilter({ + required this.property, + required this.filter, + }) : super._(); + + /// Property containing the embedded object(s). + final String property; + + /// Filter condition that should be applied + final FilterOperation filter; +} + +/// Filter condition based on a link. +class LinkFilter extends FilterOperation { + /// Create a filter condition based on a link. + const LinkFilter({ + required this.linkName, + required FilterOperation this.filter, + }) : lower = null, + upper = null, + super._(); + + /// Create a filter condition based on the number of linked objects. + const LinkFilter.length({ + required this.linkName, + required int this.lower, + required int this.upper, + }) : filter = null, + assert(lower >= 0 && upper >= 0, 'Link length must be positive.'), + super._(); + + /// Isar name of the link. + final String linkName; + + /// Filter condition that should be applied + final FilterOperation? filter; + + /// The minumum number of linked objects + final int? lower; + + /// The maximum number of linked objects + final int? upper; +} diff --git a/lib/src/schema/collection_schema.dart b/lib/src/schema/collection_schema.dart new file mode 100644 index 0000000..24d280a --- /dev/null +++ b/lib/src/schema/collection_schema.dart @@ -0,0 +1,152 @@ +part of isar; + +/// This schema represents a collection. +class CollectionSchema extends Schema { + /// @nodoc + @protected + const CollectionSchema({ + required super.id, + required super.name, + required super.properties, + required super.estimateSize, + required super.serialize, + required super.deserialize, + required super.deserializeProp, + required this.idName, + required this.indexes, + required this.links, + required this.embeddedSchemas, + required this.getId, + required this.getLinks, + required this.attach, + required this.version, + }) : assert( + Isar.version == version, + 'Outdated generated code. 
Please re-run code ' + 'generation using the latest generator.', + ); + + /// @nodoc + @protected + factory CollectionSchema.fromJson(Map json) { + final collection = Schema.fromJson(json); + return CollectionSchema( + id: collection.id, + name: collection.name, + properties: collection.properties, + idName: json['idName'] as String, + indexes: { + for (final index in json['indexes'] as List) + (index as Map)['name'] as String: + IndexSchema.fromJson(index), + }, + links: { + for (final link in json['links'] as List) + (link as Map)['name'] as String: + LinkSchema.fromJson(link), + }, + embeddedSchemas: { + for (final schema in json['embeddedSchemas'] as List) + (schema as Map)['name'] as String: + Schema.fromJson(schema), + }, + estimateSize: (_, __, ___) => throw UnimplementedError(), + serialize: (_, __, ___, ____) => throw UnimplementedError(), + deserialize: (_, __, ___, ____) => throw UnimplementedError(), + deserializeProp: (_, __, ___, ____) => throw UnimplementedError(), + getId: (_) => throw UnimplementedError(), + getLinks: (_) => throw UnimplementedError(), + attach: (_, __, ___) => throw UnimplementedError(), + version: Isar.version, + ); + } + + /// Name of the id property + final String idName; + + @override + bool get embedded => false; + + /// A map of name -> index pairs + final Map indexes; + + /// A map of name -> link pairs + final Map links; + + /// A map of name -> embedded schema pairs + final Map> embeddedSchemas; + + /// @nodoc + final GetId getId; + + /// @nodoc + final GetLinks getLinks; + + /// @nodoc + final Attach attach; + + /// @nodoc + final String version; + + /// @nodoc + void toCollection(void Function() callback) => callback(); + + /// @nodoc + @pragma('vm:prefer-inline') + IndexSchema index(String indexName) { + final index = indexes[indexName]; + if (index != null) { + return index; + } else { + throw IsarError('Unknown index "$indexName"'); + } + } + + /// @nodoc + @pragma('vm:prefer-inline') + LinkSchema link(String 
linkName) { + final link = links[linkName]; + if (link != null) { + return link; + } else { + throw IsarError('Unknown link "$linkName"'); + } + } + + /// @nodoc + @protected + @override + Map toJson() { + final json = { + ...super.toJson(), + 'idName': idName, + 'indexes': [ + for (final index in indexes.values) index.toJson(), + ], + 'links': [ + for (final link in links.values) link.toJson(), + ], + }; + + assert(() { + json['embeddedSchemas'] = [ + for (final schema in embeddedSchemas.values) schema.toJson(), + ]; + return true; + }()); + + return json; + } +} + +/// @nodoc +@protected +typedef GetId = Id Function(T object); + +/// @nodoc +@protected +typedef GetLinks = List> Function(T object); + +/// @nodoc +@protected +typedef Attach = void Function(IsarCollection col, Id id, T object); diff --git a/lib/src/schema/index_schema.dart b/lib/src/schema/index_schema.dart new file mode 100644 index 0000000..d8ceb8b --- /dev/null +++ b/lib/src/schema/index_schema.dart @@ -0,0 +1,104 @@ +part of isar; + +/// This schema represents an index. +class IndexSchema { + /// @nodoc + @protected + const IndexSchema({ + required this.id, + required this.name, + required this.unique, + required this.replace, + required this.properties, + }); + + /// @nodoc + @protected + factory IndexSchema.fromJson(Map json) { + return IndexSchema( + id: -1, + name: json['name'] as String, + unique: json['unique'] as bool, + replace: json['replace'] as bool, + properties: (json['properties'] as List) + .map((e) => IndexPropertySchema.fromJson(e as Map)) + .toList(), + ); + } + + /// Internal id of this index. + final int id; + + /// Name of this index. + final String name; + + /// Whether duplicates are disallowed in this index. + final bool unique; + + /// Whether duplocates will be replaced or throw an error. + final bool replace; + + /// Composite properties. 
+ final List properties; + + /// @nodoc + @protected + Map toJson() { + final json = { + 'name': name, + 'unique': unique, + 'replace': replace, + 'properties': [ + for (final property in properties) property.toJson(), + ], + }; + + return json; + } +} + +/// This schema represents a composite index property. +class IndexPropertySchema { + /// @nodoc + @protected + const IndexPropertySchema({ + required this.name, + required this.type, + required this.caseSensitive, + }); + + /// @nodoc + @protected + factory IndexPropertySchema.fromJson(Map json) { + return IndexPropertySchema( + name: json['name'] as String, + type: IndexType.values.firstWhere((e) => _typeName[e] == json['type']), + caseSensitive: json['caseSensitive'] as bool, + ); + } + + /// Isar name of the property. + final String name; + + /// Type of index. + final IndexType type; + + /// Whether String properties should be stored with casing. + final bool caseSensitive; + + /// @nodoc + @protected + Map toJson() { + return { + 'name': name, + 'type': _typeName[type], + 'caseSensitive': caseSensitive, + }; + } + + static const _typeName = { + IndexType.value: 'Value', + IndexType.hash: 'Hash', + IndexType.hashElements: 'HashElements', + }; +} diff --git a/lib/src/schema/link_schema.dart b/lib/src/schema/link_schema.dart new file mode 100644 index 0000000..046ed64 --- /dev/null +++ b/lib/src/schema/link_schema.dart @@ -0,0 +1,64 @@ +part of isar; + +/// This schema represents a link to the same or another collection. +class LinkSchema { + /// @nodoc + @protected + const LinkSchema({ + required this.id, + required this.name, + required this.target, + required this.single, + this.linkName, + }); + + /// @nodoc + @protected + factory LinkSchema.fromJson(Map json) { + return LinkSchema( + id: -1, + name: json['name'] as String, + target: json['target'] as String, + single: json['single'] as bool, + linkName: json['linkName'] as String?, + ); + } + + /// Internal id of this link. 
+ final int id; + + /// Name of this link. + final String name; + + /// Isar name of the target collection. + final String target; + + /// Whether this is link can only hold a single target object. + final bool single; + + /// If this is a backlink, [linkName] is the name of the source link in the + /// [target] collection. + final String? linkName; + + /// Whether this link is a backlink. + bool get isBacklink => linkName != null; + + /// @nodoc + @protected + Map toJson() { + final json = { + 'name': name, + 'target': target, + 'single': single, + }; + + assert(() { + if (linkName != null) { + json['linkName'] = linkName; + } + return true; + }()); + + return json; + } +} diff --git a/lib/src/schema/property_schema.dart b/lib/src/schema/property_schema.dart new file mode 100644 index 0000000..9f34c13 --- /dev/null +++ b/lib/src/schema/property_schema.dart @@ -0,0 +1,183 @@ +part of isar; + +/// A single propery of a collection or embedded object. +class PropertySchema { + /// @nodoc + @protected + const PropertySchema({ + required this.id, + required this.name, + required this.type, + this.enumMap, + this.target, + }); + + /// @nodoc + @protected + factory PropertySchema.fromJson(Map json) { + return PropertySchema( + id: -1, + name: json['name'] as String, + type: IsarType.values.firstWhere((e) => e.schemaName == json['type']), + enumMap: json['enumMap'] as Map?, + target: json['target'] as String?, + ); + } + + /// Internal id of this property. + final int id; + + /// Name of the property + final String name; + + /// Isar type of the property + final IsarType type; + + /// Maps enum names to database values + final Map? enumMap; + + /// For embedded objects: Name of the target schema + final String? 
target; + + /// @nodoc + @protected + Map toJson() { + final json = { + 'name': name, + 'type': type.schemaName, + if (target != null) 'target': target, + }; + + assert(() { + if (enumMap != null) { + json['enumMap'] = enumMap; + } + return true; + }()); + + return json; + } +} + +/// Supported Isar types +enum IsarType { + /// Boolean + bool('Bool'), + + /// 8-bit unsigned integer + byte('Byte'), + + /// 32-bit singed integer + int('Int'), + + /// 32-bit float + float('Float'), + + /// 64-bit singed integer + long('Long'), + + /// 64-bit float + double('Double'), + + /// DateTime + dateTime('DateTime'), + + /// String + string('String'), + + /// Embedded object + object('Object'), + + /// Boolean list + boolList('BoolList'), + + /// 8-bit unsigned integer list + byteList('ByteList'), + + /// 32-bit singed integer list + intList('IntList'), + + /// 32-bit float list + floatList('FloatList'), + + /// 64-bit singed integer list + longList('LongList'), + + /// 64-bit float list + doubleList('DoubleList'), + + /// DateTime list + dateTimeList('DateTimeList'), + + /// String list + stringList('StringList'), + + /// Embedded object list + objectList('ObjectList'); + + /// @nodoc + const IsarType(this.schemaName); + + /// @nodoc + final String schemaName; +} + +/// @nodoc +extension IsarTypeX on IsarType { + /// Whether this type represents a list + bool get isList => index >= IsarType.boolList.index; + + /// @nodoc + IsarType get scalarType { + switch (this) { + case IsarType.boolList: + return IsarType.bool; + case IsarType.byteList: + return IsarType.byte; + case IsarType.intList: + return IsarType.int; + case IsarType.floatList: + return IsarType.float; + case IsarType.longList: + return IsarType.long; + case IsarType.doubleList: + return IsarType.double; + case IsarType.dateTimeList: + return IsarType.dateTime; + case IsarType.stringList: + return IsarType.string; + case IsarType.objectList: + return IsarType.object; + // ignore: no_default_cases + default: + return 
this; + } + } + + /// @nodoc + IsarType get listType { + switch (this) { + case IsarType.bool: + return IsarType.boolList; + case IsarType.byte: + return IsarType.byteList; + case IsarType.int: + return IsarType.intList; + case IsarType.float: + return IsarType.floatList; + case IsarType.long: + return IsarType.longList; + case IsarType.double: + return IsarType.doubleList; + case IsarType.dateTime: + return IsarType.dateTimeList; + case IsarType.string: + return IsarType.stringList; + case IsarType.object: + return IsarType.objectList; + // ignore: no_default_cases + default: + return this; + } + } +} diff --git a/lib/src/schema/schema.dart b/lib/src/schema/schema.dart new file mode 100644 index 0000000..8cdf73a --- /dev/null +++ b/lib/src/schema/schema.dart @@ -0,0 +1,126 @@ +part of isar; + +/// This schema either represents a collection or embedded object. +class Schema { + /// @nodoc + @protected + const Schema({ + required this.id, + required this.name, + required this.properties, + required this.estimateSize, + required this.serialize, + required this.deserialize, + required this.deserializeProp, + }); + + /// @nodoc + @protected + factory Schema.fromJson(Map json) { + return Schema( + id: -1, + name: json['name'] as String, + properties: { + for (final property in json['properties'] as List) + (property as Map)['name'] as String: + PropertySchema.fromJson(property), + }, + estimateSize: (_, __, ___) => throw UnimplementedError(), + serialize: (_, __, ___, ____) => throw UnimplementedError(), + deserialize: (_, __, ___, ____) => throw UnimplementedError(), + deserializeProp: (_, __, ___, ____) => throw UnimplementedError(), + ); + } + + /// Internal id of this collection or embedded object. 
+ final int id; + + /// Name of the collection or embedded object + final String name; + + /// Whether this is an embedded object + bool get embedded => true; + + /// A map of name -> property pairs + final Map properties; + + /// @nodoc + @protected + final EstimateSize estimateSize; + + /// @nodoc + @protected + final Serialize serialize; + + /// @nodoc + @protected + final Deserialize deserialize; + + /// @nodoc + @protected + final DeserializeProp deserializeProp; + + /// Returns a property by its name or throws an error. + @pragma('vm:prefer-inline') + PropertySchema property(String propertyName) { + final property = properties[propertyName]; + if (property != null) { + return property; + } else { + throw IsarError('Unknown property "$propertyName"'); + } + } + + /// @nodoc + @protected + Map toJson() { + final json = { + 'name': name, + 'embedded': embedded, + 'properties': [ + for (final property in properties.values) property.toJson(), + ], + }; + + return json; + } + + /// @nodoc + @protected + Type get type => OBJ; +} + +/// @nodoc +@protected +typedef EstimateSize = int Function( + T object, + List offsets, + Map> allOffsets, +); + +/// @nodoc +@protected +typedef Serialize = void Function( + T object, + IsarWriter writer, + List offsets, + Map> allOffsets, +); + +/// @nodoc +@protected +typedef Deserialize = T Function( + Id id, + IsarReader reader, + List offsets, + Map> allOffsets, +); + +/// @nodoc +@protected +typedef DeserializeProp = dynamic Function( + IsarReader reader, + int propertyId, + int offset, + Map> allOffsets, +); diff --git a/lib/src/web/bindings.dart b/lib/src/web/bindings.dart new file mode 100644 index 0000000..d4a1b76 --- /dev/null +++ b/lib/src/web/bindings.dart @@ -0,0 +1,188 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:indexed_db'; +import 'dart:js'; + +import 'package:isar/isar.dart'; +import 'package:js/js.dart'; +import 'package:js/js_util.dart'; + +@JS('JSON.stringify') +external String stringify(dynamic 
value); + +@JS('indexedDB.cmp') +external int idbCmp(dynamic value1, dynamic value2); + +@JS('Object.keys') +external List objectKeys(dynamic obj); + +Map jsMapToDart(Object obj) { + final keys = objectKeys(obj); + final map = {}; + for (final key in keys) { + map[key] = getProperty(obj, key); + } + return map; +} + +@JS('Promise') +class Promise {} + +extension PromiseX on Promise { + Future wait() => promiseToFuture(this); +} + +@JS('openIsar') +external Promise openIsarJs( + String name, + List schemas, + bool relaxedDurability, +); + +@JS('IsarTxn') +class IsarTxnJs { + external Promise commit(); + + external void abort(); + + external bool get write; +} + +@JS('IsarInstance') +class IsarInstanceJs { + external IsarTxnJs beginTxn(bool write); + + external IsarCollectionJs getCollection(String name); + + external Promise close(bool deleteFromDisk); +} + +typedef ChangeCallbackJs = void Function(); + +typedef ObjectChangeCallbackJs = void Function(Object? object); + +typedef QueryChangeCallbackJs = void Function(List results); + +typedef StopWatchingJs = JsFunction; + +@JS('IsarCollection') +class IsarCollectionJs { + external IsarLinkJs getLink(String name); + + external Promise getAll(IsarTxnJs txn, List ids); + + external Promise getAllByIndex( + IsarTxnJs txn, + String indexName, + List> values, + ); + + external Promise putAll(IsarTxnJs txn, List objects); + + external Promise deleteAll(IsarTxnJs txn, List ids); + + external Promise deleteAllByIndex( + IsarTxnJs txn, + String indexName, + List keys, + ); + + external Promise clear(IsarTxnJs txn); + + external StopWatchingJs watchLazy(ChangeCallbackJs callback); + + external StopWatchingJs watchObject(Id id, ObjectChangeCallbackJs callback); + + external StopWatchingJs watchQuery( + QueryJs query, + QueryChangeCallbackJs callback, + ); + + external StopWatchingJs watchQueryLazy( + QueryJs query, + ChangeCallbackJs callback, + ); +} + +@JS('IsarLink') +class IsarLinkJs { + external Promise update( + IsarTxnJs 
txn, + bool backlink, + Id id, + List addedTargets, + List deletedTargets, + ); + + external Promise clear(IsarTxnJs txn, Id id, bool backlink); +} + +@JS('IdWhereClause') +@anonymous +class IdWhereClauseJs { + external KeyRange? range; +} + +@JS('IndexWhereClause') +@anonymous +class IndexWhereClauseJs { + external String indexName; + external KeyRange? range; +} + +@JS('LinkWhereClause') +@anonymous +class LinkWhereClauseJs { + external String linkCollection; + external String linkName; + external bool backlink; + external Id id; +} + +@JS('Function') +class FilterJs { + external FilterJs(String id, String obj, String method); +} + +@JS('Function') +class SortCmpJs { + external SortCmpJs(String a, String b, String method); +} + +@JS('Function') +class DistinctValueJs { + external DistinctValueJs(String obj, String method); +} + +@JS('IsarQuery') +class QueryJs { + external QueryJs( + IsarCollectionJs collection, + List whereClauses, + bool whereDistinct, + bool whereAscending, + FilterJs? filter, + SortCmpJs? sortCmp, + DistinctValueJs? distinctValue, + int? offset, + int? 
limit, + ); + + external Promise findFirst(IsarTxnJs txn); + + external Promise findAll(IsarTxnJs txn); + + external Promise deleteFirst(IsarTxnJs txn); + + external Promise deleteAll(IsarTxnJs txn); + + external Promise min(IsarTxnJs txn, String propertyName); + + external Promise max(IsarTxnJs txn, String propertyName); + + external Promise sum(IsarTxnJs txn, String propertyName); + + external Promise average(IsarTxnJs txn, String propertyName); + + external Promise count(IsarTxnJs txn); +} diff --git a/lib/src/web/isar_collection_impl.dart b/lib/src/web/isar_collection_impl.dart new file mode 100644 index 0000000..d89ad69 --- /dev/null +++ b/lib/src/web/isar_collection_impl.dart @@ -0,0 +1,266 @@ +// ignore_for_file: public_member_api_docs, invalid_use_of_protected_member + +import 'dart:async'; +import 'dart:convert'; +import 'dart:js'; +import 'dart:js_util'; +import 'dart:typed_data'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/web/bindings.dart'; +import 'package:isar/src/web/isar_impl.dart'; +import 'package:isar/src/web/isar_reader_impl.dart'; +import 'package:isar/src/web/isar_web.dart'; +import 'package:isar/src/web/isar_writer_impl.dart'; +import 'package:isar/src/web/query_build.dart'; +import 'package:meta/dart2js.dart'; + +class IsarCollectionImpl extends IsarCollection { + IsarCollectionImpl({ + required this.isar, + required this.native, + required this.schema, + }); + + @override + final IsarImpl isar; + final IsarCollectionJs native; + + @override + final CollectionSchema schema; + + @override + String get name => schema.name; + + late final _offsets = isar.offsets[OBJ]!; + + @tryInline + OBJ deserializeObject(Object object) { + final id = getProperty(object, idName); + final reader = IsarReaderImpl(object); + return schema.deserialize(id, reader, _offsets, isar.offsets); + } + + @tryInline + List deserializeObjects(dynamic objects) { + final list = objects as List; + final results = []; + for (final object in list) { + 
results.add(object is Object ? deserializeObject(object) : null); + } + return results; + } + + @override + Future> getAll(List ids) { + return isar.getTxn(false, (IsarTxnJs txn) async { + final objects = await native.getAll(txn, ids).wait>(); + return deserializeObjects(objects); + }); + } + + @override + Future> getAllByIndex(String indexName, List keys) { + return isar.getTxn(false, (IsarTxnJs txn) async { + final objects = await native + .getAllByIndex(txn, indexName, keys) + .wait>(); + return deserializeObjects(objects); + }); + } + + @override + List getAllSync(List ids) => unsupportedOnWeb(); + + @override + List getAllByIndexSync(String indexName, List keys) => + unsupportedOnWeb(); + + @override + Future> putAll(List objects) { + return putAllByIndex(null, objects); + } + + @override + List putAllSync(List objects, {bool saveLinks = true}) => + unsupportedOnWeb(); + + @override + Future> putAllByIndex(String? indexName, List objects) { + return isar.getTxn(true, (IsarTxnJs txn) async { + final serialized = []; + for (final object in objects) { + final jsObj = newObject(); + final writer = IsarWriterImpl(jsObj); + schema.serialize(object, writer, _offsets, isar.offsets); + setProperty(jsObj, idName, schema.getId(object)); + serialized.add(jsObj); + } + final ids = await native.putAll(txn, serialized).wait>(); + for (var i = 0; i < objects.length; i++) { + final object = objects[i]; + final id = ids[i] as Id; + schema.attach(this, id, object); + } + + return ids.cast().toList(); + }); + } + + @override + List putAllByIndexSync( + String indexName, + List objects, { + bool saveLinks = true, + }) => + unsupportedOnWeb(); + + @override + Future deleteAll(List ids) async { + await isar.getTxn(true, (IsarTxnJs txn) { + return native.deleteAll(txn, ids).wait(); + }); + return ids.length; + } + + @override + Future deleteAllByIndex(String indexName, List keys) { + return isar.getTxn(true, (IsarTxnJs txn) { + return native.deleteAllByIndex(txn, indexName, 
keys).wait(); + }); + } + + @override + int deleteAllSync(List ids) => unsupportedOnWeb(); + + @override + int deleteAllByIndexSync(String indexName, List keys) => + unsupportedOnWeb(); + + @override + Future clear() { + return isar.getTxn(true, (IsarTxnJs txn) { + return native.clear(txn).wait(); + }); + } + + @override + void clearSync() => unsupportedOnWeb(); + + @override + Future importJson(List> json) { + return isar.getTxn(true, (IsarTxnJs txn) async { + await native.putAll(txn, json.map(jsify).toList()).wait(); + }); + } + + @override + Future importJsonRaw(Uint8List jsonBytes) { + final json = jsonDecode(const Utf8Decoder().convert(jsonBytes)) as List; + return importJson(json.cast()); + } + + @override + void importJsonSync(List> json) => unsupportedOnWeb(); + + @override + void importJsonRawSync(Uint8List jsonBytes) => unsupportedOnWeb(); + + @override + Future count() => where().count(); + + @override + int countSync() => unsupportedOnWeb(); + + @override + Future getSize({ + bool includeIndexes = false, + bool includeLinks = false, + }) => + unsupportedOnWeb(); + + @override + int getSizeSync({ + bool includeIndexes = false, + bool includeLinks = false, + }) => + unsupportedOnWeb(); + + @override + Stream watchLazy({bool fireImmediately = false}) { + JsFunction? stop; + final controller = StreamController( + onCancel: () { + stop?.apply([]); + }, + ); + + final void Function() callback = allowInterop(() => controller.add(null)); + stop = native.watchLazy(callback); + + return controller.stream; + } + + @override + Stream watchObject( + Id id, { + bool fireImmediately = false, + bool deserialize = true, + }) { + JsFunction? stop; + final controller = StreamController( + onCancel: () { + stop?.apply([]); + }, + ); + + final Null Function(Object? obj) callback = allowInterop((Object? obj) { + final object = deserialize && obj != null ? 
deserializeObject(obj) : null; + controller.add(object); + }); + stop = native.watchObject(id, callback); + + return controller.stream; + } + + @override + Stream watchObjectLazy(Id id, {bool fireImmediately = false}) => + watchObject(id, deserialize: false); + + @override + Query buildQuery({ + List whereClauses = const [], + bool whereDistinct = false, + Sort whereSort = Sort.asc, + FilterOperation? filter, + List sortBy = const [], + List distinctBy = const [], + int? offset, + int? limit, + String? property, + }) { + return buildWebQuery( + this, + whereClauses, + whereDistinct, + whereSort, + filter, + sortBy, + distinctBy, + offset, + limit, + property, + ); + } + + @override + Future verify(List objects) => unsupportedOnWeb(); + + @override + Future verifyLink( + String linkName, + List sourceIds, + List targetIds, + ) => + unsupportedOnWeb(); +} diff --git a/lib/src/web/isar_impl.dart b/lib/src/web/isar_impl.dart new file mode 100644 index 0000000..5c0efb4 --- /dev/null +++ b/lib/src/web/isar_impl.dart @@ -0,0 +1,135 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:async'; +import 'dart:html'; + +import 'package:isar/isar.dart'; + +import 'package:isar/src/web/bindings.dart'; +import 'package:isar/src/web/isar_web.dart'; + +const Symbol _zoneTxn = #zoneTxn; + +class IsarImpl extends Isar { + IsarImpl(super.name, this.instance); + + final IsarInstanceJs instance; + final offsets = >{}; + final List> _activeAsyncTxns = []; + + @override + final String? 
directory = null; + + void requireNotInTxn() { + if (Zone.current[_zoneTxn] != null) { + throw IsarError( + 'Cannot perform this operation from within an active transaction.', + ); + } + } + + Future _txn( + bool write, + bool silent, + Future Function() callback, + ) async { + requireOpen(); + requireNotInTxn(); + + final completer = Completer(); + _activeAsyncTxns.add(completer.future); + + final txn = instance.beginTxn(write); + + final zone = Zone.current.fork( + zoneValues: {_zoneTxn: txn}, + ); + + T result; + try { + result = await zone.run(callback); + await txn.commit().wait(); + } catch (e) { + txn.abort(); + if (e is DomException) { + if (e.name == DomException.CONSTRAINT) { + throw IsarUniqueViolationError(); + } else { + throw IsarError('${e.name}: ${e.message}'); + } + } else { + rethrow; + } + } finally { + completer.complete(); + _activeAsyncTxns.remove(completer.future); + } + + return result; + } + + @override + Future txn(Future Function() callback) { + return _txn(false, false, callback); + } + + @override + Future writeTxn(Future Function() callback, {bool silent = false}) { + return _txn(true, silent, callback); + } + + @override + T txnSync(T Function() callback) => unsupportedOnWeb(); + + @override + T writeTxnSync(T Function() callback, {bool silent = false}) => + unsupportedOnWeb(); + + Future getTxn(bool write, Future Function(IsarTxnJs txn) callback) { + final currentTxn = Zone.current[_zoneTxn] as IsarTxnJs?; + if (currentTxn != null) { + if (write && !currentTxn.write) { + throw IsarError( + 'Operation cannot be performed within a read transaction.', + ); + } + return callback(currentTxn); + } else if (!write) { + return _txn(false, false, () { + return callback(Zone.current[_zoneTxn] as IsarTxnJs); + }); + } else { + throw IsarError('Write operations require an explicit transaction.'); + } + } + + @override + Future getSize({ + bool includeIndexes = false, + bool includeLinks = false, + }) => + unsupportedOnWeb(); + + @override + int 
getSizeSync({ + bool includeIndexes = false, + bool includeLinks = false, + }) => + unsupportedOnWeb(); + + @override + Future copyToFile(String targetPath) => unsupportedOnWeb(); + + @override + Future close({bool deleteFromDisk = false}) async { + requireOpen(); + requireNotInTxn(); + await Future.wait(_activeAsyncTxns); + await super.close(); + await instance.close(deleteFromDisk).wait(); + return true; + } + + @override + Future verify() => unsupportedOnWeb(); +} diff --git a/lib/src/web/isar_link_impl.dart b/lib/src/web/isar_link_impl.dart new file mode 100644 index 0000000..6efa9c1 --- /dev/null +++ b/lib/src/web/isar_link_impl.dart @@ -0,0 +1,75 @@ +// ignore_for_file: public_member_api_docs + +import 'package:isar/isar.dart'; +import 'package:isar/src/common/isar_link_base_impl.dart'; +import 'package:isar/src/common/isar_link_common.dart'; +import 'package:isar/src/common/isar_links_common.dart'; +import 'package:isar/src/web/bindings.dart'; +import 'package:isar/src/web/isar_collection_impl.dart'; +import 'package:isar/src/web/isar_web.dart'; + +mixin IsarLinkBaseMixin on IsarLinkBaseImpl { + @override + IsarCollectionImpl get sourceCollection => + super.sourceCollection as IsarCollectionImpl; + + @override + IsarCollectionImpl get targetCollection => + super.targetCollection as IsarCollectionImpl; + + @override + late final Id Function(OBJ) getId = targetCollection.schema.getId; + + late final String? backlinkLinkName = + sourceCollection.schema.link(linkName).linkName; + + late final IsarLinkJs jsLink = backlinkLinkName != null + ? targetCollection.native.getLink(backlinkLinkName!) 
+ : sourceCollection.native.getLink(linkName); + + @override + Future update({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }) { + final linkList = link.toList(); + final unlinkList = unlink.toList(); + + final containingId = requireAttached(); + final backlink = backlinkLinkName != null; + + final linkIds = List.filled(linkList.length, 0); + for (var i = 0; i < linkList.length; i++) { + linkIds[i] = requireGetId(linkList[i]); + } + + final unlinkIds = List.filled(unlinkList.length, 0); + for (var i = 0; i < unlinkList.length; i++) { + unlinkIds[i] = requireGetId(unlinkList[i]); + } + + return targetCollection.isar.getTxn(true, (IsarTxnJs txn) async { + if (reset) { + await jsLink.clear(txn, containingId, backlink).wait(); + } + return jsLink + .update(txn, backlink, containingId, linkIds, unlinkIds) + .wait(); + }); + } + + @override + void updateSync({ + Iterable link = const [], + Iterable unlink = const [], + bool reset = false, + }) => + unsupportedOnWeb(); +} + +class IsarLinkImpl extends IsarLinkCommon + with IsarLinkBaseMixin {} + +class IsarLinksImpl extends IsarLinksCommon + with IsarLinkBaseMixin {} diff --git a/lib/src/web/isar_reader_impl.dart b/lib/src/web/isar_reader_impl.dart new file mode 100644 index 0000000..fac7e8b --- /dev/null +++ b/lib/src/web/isar_reader_impl.dart @@ -0,0 +1,347 @@ +// ignore_for_file: public_member_api_docs + +import 'package:isar/isar.dart'; +import 'package:js/js_util.dart'; +import 'package:meta/dart2js.dart'; + +const nullNumber = double.negativeInfinity; +const idName = '_id'; +final nullDate = DateTime.fromMillisecondsSinceEpoch(0); + +class IsarReaderImpl implements IsarReader { + IsarReaderImpl(this.object); + + final Object object; + + @tryInline + @override + bool readBool(int offset) { + final value = getProperty(object, offset); + return value == 1; + } + + @tryInline + @override + bool? 
readBoolOrNull(int offset) { + final value = getProperty(object, offset); + return value == 0 + ? false + : value == 1 + ? true + : null; + } + + @tryInline + @override + int readByte(int offset) { + final value = getProperty(object, offset); + return value is int ? value : nullNumber as int; + } + + @tryInline + @override + int? readByteOrNull(int offset) { + final value = getProperty(object, offset); + return value is int && value != nullNumber ? value : null; + } + + @tryInline + @override + int readInt(int offset) { + final value = getProperty(object, offset); + return value is int ? value : nullNumber as int; + } + + @tryInline + @override + int? readIntOrNull(int offset) { + final value = getProperty(object, offset); + return value is int && value != nullNumber ? value : null; + } + + @tryInline + @override + double readFloat(int offset) { + final value = getProperty(object, offset); + return value is double ? value : nullNumber; + } + + @tryInline + @override + double? readFloatOrNull(int offset) { + final value = getProperty(object, offset); + return value is double && value != nullNumber ? value : null; + } + + @tryInline + @override + int readLong(int offset) { + final value = getProperty(object, offset); + return value is int ? value : nullNumber as int; + } + + @tryInline + @override + int? readLongOrNull(int offset) { + final value = getProperty(object, offset); + return value is int && value != nullNumber ? value : null; + } + + @tryInline + @override + double readDouble(int offset) { + final value = getProperty(object, offset); + return value is double && value != nullNumber ? value : nullNumber; + } + + @tryInline + @override + double? readDoubleOrNull(int offset) { + final value = getProperty(object, offset); + return value is double && value != nullNumber ? value : null; + } + + @tryInline + @override + DateTime readDateTime(int offset) { + final value = getProperty(object, offset); + return value is int && value != nullNumber + ? 
DateTime.fromMillisecondsSinceEpoch(value, isUtc: true).toLocal() + : nullDate; + } + + @tryInline + @override + DateTime? readDateTimeOrNull(int offset) { + final value = getProperty(object, offset); + return value is int && value != nullNumber + ? DateTime.fromMillisecondsSinceEpoch(value, isUtc: true).toLocal() + : null; + } + + @tryInline + @override + String readString(int offset) { + final value = getProperty(object, offset); + return value is String ? value : ''; + } + + @tryInline + @override + String? readStringOrNull(int offset) { + final value = getProperty(object, offset); + return value is String ? value : null; + } + + @tryInline + @override + T? readObjectOrNull( + int offset, + Deserialize deserialize, + Map> allOffsets, + ) { + final value = getProperty(object, offset); + if (value is Object) { + final reader = IsarReaderImpl(value); + return deserialize(0, reader, allOffsets[T]!, allOffsets); + } else { + return null; + } + } + + @tryInline + @override + List? readBoolList(int offset) { + final value = getProperty(object, offset); + return value is List ? value.map((e) => e == 1).toList() : null; + } + + @tryInline + @override + List? readBoolOrNullList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value + .map( + (e) => e == 0 + ? false + : e == 1 + ? true + : null, + ) + .toList() + : null; + } + + @tryInline + @override + List? readByteList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is int ? e : nullNumber as int).toList() + : null; + } + + @tryInline + @override + List? readIntList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is int ? e : nullNumber as int).toList() + : null; + } + + @tryInline + @override + List? readIntOrNullList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is int && e != nullNumber ? 
e : null).toList() + : null; + } + + @tryInline + @override + List? readFloatList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is double ? e : nullNumber).toList() + : null; + } + + @tryInline + @override + List? readFloatOrNullList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is double && e != nullNumber ? e : null).toList() + : null; + } + + @tryInline + @override + List? readLongList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is int ? e : nullNumber as int).toList() + : null; + } + + @tryInline + @override + List? readLongOrNullList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is int && e != nullNumber ? e : null).toList() + : null; + } + + @tryInline + @override + List? readDoubleList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is double ? e : nullNumber).toList() + : null; + } + + @tryInline + @override + List? readDoubleOrNullList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is double && e != nullNumber ? e : null).toList() + : null; + } + + @tryInline + @override + List? readDateTimeList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value + .map( + (e) => e is int && e != nullNumber + ? DateTime.fromMillisecondsSinceEpoch(e, isUtc: true) + .toLocal() + : nullDate, + ) + .toList() + : null; + } + + @tryInline + @override + List? readDateTimeOrNullList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value + .map( + (e) => e is int && e != nullNumber + ? DateTime.fromMillisecondsSinceEpoch(e, isUtc: true) + .toLocal() + : null, + ) + .toList() + : null; + } + + @tryInline + @override + List? 
readStringList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is String ? e : '').toList() + : null; + } + + @tryInline + @override + List? readStringOrNullList(int offset) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) => e is String ? e : null).toList() + : null; + } + + @tryInline + @override + List? readObjectList( + int offset, + Deserialize deserialize, + Map> allOffsets, + T defaultValue, + ) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) { + if (e is Object) { + final reader = IsarReaderImpl(e); + return deserialize(0, reader, allOffsets[T]!, allOffsets); + } else { + return defaultValue; + } + }).toList() + : null; + } + + @tryInline + @override + List? readObjectOrNullList( + int offset, + Deserialize deserialize, + Map> allOffsets, + ) { + final value = getProperty(object, offset); + return value is List + ? value.map((e) { + if (e is Object) { + final reader = IsarReaderImpl(e); + return deserialize(0, reader, allOffsets[T]!, allOffsets); + } else { + return null; + } + }).toList() + : null; + } +} diff --git a/lib/src/web/isar_web.dart b/lib/src/web/isar_web.dart new file mode 100644 index 0000000..0cff278 --- /dev/null +++ b/lib/src/web/isar_web.dart @@ -0,0 +1,48 @@ +// ignore_for_file: unused_field, public_member_api_docs + +import 'dart:async'; + +import 'package:isar/isar.dart'; +import 'package:meta/meta.dart'; + +/// @nodoc +@protected +const Id isarMinId = -9007199254740990; + +/// @nodoc +@protected +const Id isarMaxId = 9007199254740991; + +/// @nodoc +@protected +const Id isarAutoIncrementId = -9007199254740991; + +/// @nodoc +Never unsupportedOnWeb() { + throw UnsupportedError('This operation is not supported for Isar web'); +} + +class _WebAbi { + static const androidArm = null as dynamic; + static const androidArm64 = null as dynamic; + static const androidIA32 = null as dynamic; + static const 
androidX64 = null as dynamic; + static const iosArm64 = null as dynamic; + static const iosX64 = null as dynamic; + static const linuxArm64 = null as dynamic; + static const linuxX64 = null as dynamic; + static const macosArm64 = null as dynamic; + static const macosX64 = null as dynamic; + static const windowsArm64 = null as dynamic; + static const windowsX64 = null as dynamic; +} + +/// @nodoc +@protected +typedef IsarAbi = _WebAbi; + +FutureOr initializeCoreBinary({ + Map libraries = const {}, + bool download = false, +}) => + unsupportedOnWeb(); diff --git a/lib/src/web/isar_writer_impl.dart b/lib/src/web/isar_writer_impl.dart new file mode 100644 index 0000000..a4a65ca --- /dev/null +++ b/lib/src/web/isar_writer_impl.dart @@ -0,0 +1,171 @@ +// ignore_for_file: public_member_api_docs + +import 'package:isar/isar.dart'; +import 'package:isar/src/web/isar_reader_impl.dart'; +import 'package:js/js_util.dart'; +import 'package:meta/dart2js.dart'; + +class IsarWriterImpl implements IsarWriter { + IsarWriterImpl(this.object); + + final Object object; + + @tryInline + @override + void writeBool(int offset, bool? value) { + final number = value == true + ? 1 + : value == false + ? 0 + : nullNumber; + setProperty(object, offset, number); + } + + @tryInline + @override + void writeByte(int offset, int value) { + setProperty(object, offset, value); + } + + @tryInline + @override + void writeInt(int offset, int? value) { + setProperty(object, offset, value ?? nullNumber); + } + + @tryInline + @override + void writeFloat(int offset, double? value) { + setProperty(object, offset, value ?? nullNumber); + } + + @tryInline + @override + void writeLong(int offset, int? value) { + setProperty(object, offset, value ?? nullNumber); + } + + @tryInline + @override + void writeDouble(int offset, double? value) { + setProperty(object, offset, value ?? nullNumber); + } + + @tryInline + @override + void writeDateTime(int offset, DateTime? 
value) { + setProperty( + object, + offset, + value?.toUtc().millisecondsSinceEpoch ?? nullNumber, + ); + } + + @tryInline + @override + void writeString(int offset, String? value) { + setProperty(object, offset, value ?? nullNumber); + } + + @tryInline + @override + void writeObject( + int offset, + Map> allOffsets, + Serialize serialize, + T? value, + ) { + if (value != null) { + final object = newObject(); + final writer = IsarWriterImpl(object); + serialize(value, writer, allOffsets[T]!, allOffsets); + setProperty(this.object, offset, object); + } + } + + @tryInline + @override + void writeByteList(int offset, List? values) { + setProperty(object, offset, values ?? nullNumber); + } + + @tryInline + @override + void writeBoolList(int offset, List? values) { + final list = values + ?.map( + (e) => e == false + ? 0 + : e == true + ? 1 + : nullNumber, + ) + .toList(); + setProperty(object, offset, list ?? nullNumber); + } + + @tryInline + @override + void writeIntList(int offset, List? values) { + final list = values?.map((e) => e ?? nullNumber).toList(); + setProperty(object, offset, list ?? nullNumber); + } + + @tryInline + @override + void writeFloatList(int offset, List? values) { + final list = values?.map((e) => e ?? nullNumber).toList(); + setProperty(object, offset, list ?? nullNumber); + } + + @tryInline + @override + void writeLongList(int offset, List? values) { + final list = values?.map((e) => e ?? nullNumber).toList(); + setProperty(object, offset, list ?? nullNumber); + } + + @tryInline + @override + void writeDoubleList(int offset, List? values) { + final list = values?.map((e) => e ?? nullNumber).toList(); + setProperty(object, offset, list ?? nullNumber); + } + + @tryInline + @override + void writeDateTimeList(int offset, List? values) { + final list = values + ?.map((e) => e?.toUtc().millisecondsSinceEpoch ?? nullNumber) + .toList(); + setProperty(object, offset, list ?? 
nullNumber); + } + + @tryInline + @override + void writeStringList(int offset, List? values) { + final list = values?.map((e) => e ?? nullNumber).toList(); + setProperty(object, offset, list ?? nullNumber); + } + + @tryInline + @override + void writeObjectList( + int offset, + Map> allOffsets, + Serialize serialize, + List? values, + ) { + if (values != null) { + final list = values.map((e) { + if (e != null) { + final object = newObject(); + final writer = IsarWriterImpl(object); + serialize(e, writer, allOffsets[T]!, allOffsets); + return object; + } + }).toList(); + setProperty(object, offset, list); + } + } +} diff --git a/lib/src/web/open.dart b/lib/src/web/open.dart new file mode 100644 index 0000000..5e50791 --- /dev/null +++ b/lib/src/web/open.dart @@ -0,0 +1,82 @@ +// ignore_for_file: public_member_api_docs, invalid_use_of_protected_member + +import 'dart:html'; +//import 'dart:js_util'; + +import 'package:isar/isar.dart'; +/*import 'package:isar/src/common/schemas.dart'; + +import 'package:isar/src/web/bindings.dart'; +import 'package:isar/src/web/isar_collection_impl.dart'; +import 'package:isar/src/web/isar_impl.dart';*/ +import 'package:isar/src/web/isar_web.dart'; +import 'package:meta/meta.dart'; + +bool _loaded = false; +Future initializeIsarWeb([String? jsUrl]) async { + if (_loaded) { + return; + } + _loaded = true; + + final script = ScriptElement(); + script.type = 'text/javascript'; + // ignore: unsafe_html + script.src = 'https://unpkg.com/isar@${Isar.version}/dist/index.js'; + script.async = true; + document.head!.append(script); + await script.onLoad.first.timeout( + const Duration(seconds: 30), + onTimeout: () { + throw IsarError('Failed to load Isar'); + }, + ); +} + +@visibleForTesting +void doNotInitializeIsarWeb() { + _loaded = true; +} + +Future openIsar({ + required List> schemas, + String? directory, + required String name, + required int maxSizeMiB, + required bool relaxedDurability, + CompactCondition? 
compactOnLaunch, +}) async { + throw IsarError('Please use Isar 2.5.0 if you need web support. ' + 'A 3.x version with web support will be released soon.'); + /*await initializeIsarWeb(); + final schemasJson = getSchemas(schemas).map((e) => e.toJson()); + final schemasJs = jsify(schemasJson.toList()) as List; + final instance = await openIsarJs(name, schemasJs, relaxedDurability) + .wait(); + final isar = IsarImpl(name, instance); + final cols = >{}; + for (final schema in schemas) { + final col = instance.getCollection(schema.name); + schema.toCollection(() { + schema as CollectionSchema; + cols[OBJ] = IsarCollectionImpl( + isar: isar, + native: col, + schema: schema, + ); + }); + } + + isar.attachCollections(cols); + return isar;*/ +} + +Isar openIsarSync({ + required List> schemas, + String? directory, + required String name, + required int maxSizeMiB, + required bool relaxedDurability, + CompactCondition? compactOnLaunch, +}) => + unsupportedOnWeb(); diff --git a/lib/src/web/query_build.dart b/lib/src/web/query_build.dart new file mode 100644 index 0000000..89ff6dc --- /dev/null +++ b/lib/src/web/query_build.dart @@ -0,0 +1,375 @@ +// ignore_for_file: public_member_api_docs, invalid_use_of_protected_member + +import 'dart:indexed_db'; + +import 'package:isar/isar.dart'; + +import 'package:isar/src/web/bindings.dart'; +import 'package:isar/src/web/isar_collection_impl.dart'; +import 'package:isar/src/web/isar_web.dart'; +import 'package:isar/src/web/query_impl.dart'; + +Query buildWebQuery( + IsarCollectionImpl col, + List whereClauses, + bool whereDistinct, + Sort whereSort, + FilterOperation? filter, + List sortBy, + List distinctBy, + int? offset, + int? limit, + String? 
property, +) { + final whereClausesJs = whereClauses.map((wc) { + if (wc is IdWhereClause) { + return _buildIdWhereClause(wc); + } else if (wc is IndexWhereClause) { + return _buildIndexWhereClause(col.schema, wc); + } else { + return _buildLinkWhereClause(col, wc as LinkWhereClause); + } + }).toList(); + + final filterJs = filter != null ? _buildFilter(col.schema, filter) : null; + final sortJs = sortBy.isNotEmpty ? _buildSort(sortBy) : null; + final distinctJs = distinctBy.isNotEmpty ? _buildDistinct(distinctBy) : null; + + final queryJs = QueryJs( + col.native, + whereClausesJs, + whereDistinct, + whereSort == Sort.asc, + filterJs, + sortJs, + distinctJs, + offset, + limit, + ); + + QueryDeserialize deserialize; + //if (property == null) { + deserialize = col.deserializeObject as T Function(Object); + /*} else { + deserialize = (jsObj) => col.schema.deserializeProp(jsObj, property) as T; + }*/ + + return QueryImpl(col, queryJs, deserialize, property); +} + +dynamic _valueToJs(dynamic value) { + if (value == null) { + return double.negativeInfinity; + } else if (value == true) { + return 1; + } else if (value == false) { + return 0; + } else if (value is DateTime) { + return value.toUtc().millisecondsSinceEpoch; + } else if (value is List) { + return value.map(_valueToJs).toList(); + } else { + return value; + } +} + +IdWhereClauseJs _buildIdWhereClause(IdWhereClause wc) { + return IdWhereClauseJs() + ..range = _buildKeyRange( + wc.lower, + wc.upper, + wc.includeLower, + wc.includeUpper, + ); +} + +IndexWhereClauseJs _buildIndexWhereClause( + CollectionSchema schema, + IndexWhereClause wc, +) { + final index = schema.index(wc.indexName); + + final lower = wc.lower?.toList(); + final upper = wc.upper?.toList(); + if (upper != null) { + while (index.properties.length > upper.length) { + upper.add([]); + } + } + + dynamic lowerUnwrapped = wc.lower; + if (index.properties.length == 1 && lower != null) { + lowerUnwrapped = lower.isNotEmpty ? 
lower[0] : null; + } + + dynamic upperUnwrapped = upper; + if (index.properties.length == 1 && upper != null) { + upperUnwrapped = upper.isNotEmpty ? upper[0] : double.infinity; + } + + return IndexWhereClauseJs() + ..indexName = wc.indexName + ..range = _buildKeyRange( + wc.lower != null ? _valueToJs(lowerUnwrapped) : null, + wc.upper != null ? _valueToJs(upperUnwrapped) : null, + wc.includeLower, + wc.includeUpper, + ); +} + +LinkWhereClauseJs _buildLinkWhereClause( + IsarCollectionImpl col, + LinkWhereClause wc, +) { + // ignore: unused_local_variable + final linkCol = col.isar.getCollectionByNameInternal(wc.linkCollection)! + as IsarCollectionImpl; + //final backlinkLinkName = linkCol.schema.backlinkLinkNames[wc.linkName]; + return LinkWhereClauseJs() + ..linkCollection = wc.linkCollection + //..linkName = backlinkLinkName ?? wc.linkName + //..backlink = backlinkLinkName != null + ..id = wc.id; +} + +KeyRange? _buildKeyRange( + dynamic lower, + dynamic upper, + bool includeLower, + bool includeUpper, +) { + if (lower != null) { + if (upper != null) { + final boundsEqual = idbCmp(lower, upper) == 0; + if (boundsEqual) { + if (includeLower && includeUpper) { + return KeyRange.only(lower); + } else { + // empty range + return KeyRange.upperBound(double.negativeInfinity, true); + } + } + + return KeyRange.bound( + lower, + upper, + !includeLower, + !includeUpper, + ); + } else { + return KeyRange.lowerBound(lower, !includeLower); + } + } else if (upper != null) { + return KeyRange.upperBound(upper, !includeUpper); + } + return null; +} + +FilterJs? _buildFilter( + CollectionSchema schema, + FilterOperation filter, +) { + final filterStr = _buildFilterOperation(schema, filter); + if (filterStr != null) { + return FilterJs('id', 'obj', 'return $filterStr'); + } else { + return null; + } +} + +String? 
_buildFilterOperation( + CollectionSchema schema, + FilterOperation filter, +) { + if (filter is FilterGroup) { + return _buildFilterGroup(schema, filter); + } else if (filter is LinkFilter) { + unsupportedOnWeb(); + } else if (filter is FilterCondition) { + return _buildCondition(schema, filter); + } else { + return null; + } +} + +String? _buildFilterGroup(CollectionSchema schema, FilterGroup group) { + final builtConditions = group.filters + .map((op) => _buildFilterOperation(schema, op)) + .where((e) => e != null) + .toList(); + + if (builtConditions.isEmpty) { + return null; + } + + if (group.type == FilterGroupType.not) { + return '!(${builtConditions[0]})'; + } else if (builtConditions.length == 1) { + return builtConditions[0]; + } else if (group.type == FilterGroupType.xor) { + final conditions = builtConditions.join(','); + return 'IsarQuery.xor($conditions)'; + } else { + final op = group.type == FilterGroupType.or ? '||' : '&&'; + final condition = builtConditions.join(op); + return '($condition)'; + } +} + +String _buildCondition( + CollectionSchema schema, + FilterCondition condition, +) { + dynamic _prepareFilterValue(dynamic value) { + if (value == null) { + return null; + } else if (value is String) { + return stringify(value); + } else { + return _valueToJs(value); + } + } + + final isListOp = condition.type != FilterConditionType.isNull && + condition.type != FilterConditionType.listLength && + schema.property(condition.property).type.isList; + final accessor = + condition.property == schema.idName ? 'id' : 'obj.${condition.property}'; + final variable = isListOp ? 
'e' : accessor; + + final cond = _buildConditionInternal( + conditionType: condition.type, + variable: variable, + val1: _prepareFilterValue(condition.value1), + include1: condition.include1, + val2: _prepareFilterValue(condition.value2), + include2: condition.include2, + caseSensitive: condition.caseSensitive, + ); + + if (isListOp) { + return '(Array.isArray($accessor) && $accessor.some(e => $cond))'; + } else { + return cond; + } +} + +String _buildConditionInternal({ + required FilterConditionType conditionType, + required String variable, + required Object? val1, + required bool include1, + required Object? val2, + required bool include2, + required bool caseSensitive, +}) { + final isNull = '($variable == null || $variable === -Infinity)'; + switch (conditionType) { + case FilterConditionType.equalTo: + if (val1 == null) { + return isNull; + } else if (val1 is String && !caseSensitive) { + return '$variable?.toLowerCase() === ${val1.toLowerCase()}'; + } else { + return '$variable === $val1'; + } + case FilterConditionType.between: + final val = val1 ?? val2; + final lowerOp = include1 ? '>=' : '>'; + final upperOp = include2 ? '<=' : '<'; + if (val == null) { + return isNull; + } else if ((val1 is String?) && (val2 is String?) && !caseSensitive) { + final lower = val1?.toLowerCase() ?? '-Infinity'; + final upper = val2?.toLowerCase() ?? '-Infinity'; + final variableLc = '$variable?.toLowerCase() ?? -Infinity'; + final lowerCond = 'indexedDB.cmp($variableLc, $lower) $lowerOp 0'; + final upperCond = 'indexedDB.cmp($variableLc, $upper) $upperOp 0'; + return '($lowerCond && $upperCond)'; + } else { + final lowerCond = + 'indexedDB.cmp($variable, ${val1 ?? '-Infinity'}) $lowerOp 0'; + final upperCond = + 'indexedDB.cmp($variable, ${val2 ?? 
'-Infinity'}) $upperOp 0'; + return '($lowerCond && $upperCond)'; + } + case FilterConditionType.lessThan: + if (val1 == null) { + if (include1) { + return isNull; + } else { + return 'false'; + } + } else { + final op = include1 ? '<=' : '<'; + if (val1 is String && !caseSensitive) { + return 'indexedDB.cmp($variable?.toLowerCase() ?? ' + '-Infinity, ${val1.toLowerCase()}) $op 0'; + } else { + return 'indexedDB.cmp($variable, $val1) $op 0'; + } + } + case FilterConditionType.greaterThan: + if (val1 == null) { + if (include1) { + return 'true'; + } else { + return '!$isNull'; + } + } else { + final op = include1 ? '>=' : '>'; + if (val1 is String && !caseSensitive) { + return 'indexedDB.cmp($variable?.toLowerCase() ?? ' + '-Infinity, ${val1.toLowerCase()}) $op 0'; + } else { + return 'indexedDB.cmp($variable, $val1) $op 0'; + } + } + case FilterConditionType.startsWith: + case FilterConditionType.endsWith: + case FilterConditionType.contains: + final op = conditionType == FilterConditionType.startsWith + ? 'startsWith' + : conditionType == FilterConditionType.endsWith + ? 'endsWith' + : 'includes'; + if (val1 is String) { + final isString = 'typeof $variable == "string"'; + if (!caseSensitive) { + return '($isString && $variable.toLowerCase() ' + '.$op(${val1.toLowerCase()}))'; + } else { + return '($isString && $variable.$op($val1))'; + } + } else { + throw IsarError('Unsupported type for condition'); + } + case FilterConditionType.matches: + throw UnimplementedError(); + case FilterConditionType.isNull: + return isNull; + // ignore: no_default_cases + default: + throw UnimplementedError(); + } +} + +SortCmpJs _buildSort(List properties) { + final sort = properties.map((e) { + final op = e.sort == Sort.asc ? '' : '-'; + return '${op}indexedDB.cmp(a.${e.property} ?? "-Infinity", b.${e.property} ' + '?? 
"-Infinity")'; + }).join('||'); + return SortCmpJs('a', 'b', 'return $sort'); +} + +DistinctValueJs _buildDistinct(List properties) { + final distinct = properties.map((e) { + if (e.caseSensitive == false) { + return 'obj.${e.property}?.toLowerCase() ?? "-Infinity"'; + } else { + return 'obj.${e.property}?.toString() ?? "-Infinity"'; + } + }).join('+'); + return DistinctValueJs('obj', 'return $distinct'); +} diff --git a/lib/src/web/query_impl.dart b/lib/src/web/query_impl.dart new file mode 100644 index 0000000..e2bfc15 --- /dev/null +++ b/lib/src/web/query_impl.dart @@ -0,0 +1,180 @@ +// ignore_for_file: public_member_api_docs + +import 'dart:async'; +import 'dart:convert'; +import 'dart:js'; +import 'dart:typed_data'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/web/bindings.dart'; + +import 'package:isar/src/web/isar_collection_impl.dart'; +import 'package:isar/src/web/isar_web.dart'; + +typedef QueryDeserialize = T Function(Object); + +class QueryImpl extends Query { + QueryImpl(this.col, this.queryJs, this.deserialize, this.propertyName); + final IsarCollectionImpl col; + final QueryJs queryJs; + final QueryDeserialize deserialize; + final String? propertyName; + + @override + Isar get isar => col.isar; + + @override + Future findFirst() { + return col.isar.getTxn(false, (IsarTxnJs txn) async { + final result = await queryJs.findFirst(txn).wait(); + if (result == null) { + return null; + } + return deserialize(result); + }); + } + + @override + T? findFirstSync() => unsupportedOnWeb(); + + @override + Future> findAll() { + return col.isar.getTxn(false, (IsarTxnJs txn) async { + final result = await queryJs.findAll(txn).wait>(); + return result.map((e) => deserialize(e as Object)).toList(); + }); + } + + @override + List findAllSync() => unsupportedOnWeb(); + + @override + Future aggregate(AggregationOp op) { + return col.isar.getTxn(false, (IsarTxnJs txn) async { + final property = propertyName ?? col.schema.idName; + + num? 
result; + switch (op) { + case AggregationOp.min: + result = await queryJs.min(txn, property).wait(); + break; + case AggregationOp.max: + result = await queryJs.max(txn, property).wait(); + break; + case AggregationOp.sum: + result = await queryJs.sum(txn, property).wait(); + break; + case AggregationOp.average: + result = await queryJs.average(txn, property).wait(); + break; + case AggregationOp.count: + result = await queryJs.count(txn).wait(); + break; + // ignore: no_default_cases + default: + throw UnimplementedError(); + } + + if (result == null) { + return null; + } + + if (R == DateTime) { + return DateTime.fromMillisecondsSinceEpoch(result.toInt()).toLocal() + as R; + } else if (R == int) { + return result.toInt() as R; + } else if (R == double) { + return result.toDouble() as R; + } else { + return null; + } + }); + } + + @override + R? aggregateSync(AggregationOp op) => unsupportedOnWeb(); + + @override + Future deleteFirst() { + return col.isar.getTxn(true, (IsarTxnJs txn) { + return queryJs.deleteFirst(txn).wait(); + }); + } + + @override + bool deleteFirstSync() => unsupportedOnWeb(); + + @override + Future deleteAll() { + return col.isar.getTxn(true, (IsarTxnJs txn) { + return queryJs.deleteAll(txn).wait(); + }); + } + + @override + int deleteAllSync() => unsupportedOnWeb(); + + @override + Stream> watch({bool fireImmediately = false}) { + JsFunction? stop; + final controller = StreamController>( + onCancel: () { + stop?.apply([]); + }, + ); + + if (fireImmediately) { + findAll().then(controller.add); + } + + final Null Function(List results) callback = + allowInterop((List results) { + controller.add(results.map((e) => deserialize(e as Object)).toList()); + }); + stop = col.native.watchQuery(queryJs, callback); + + return controller.stream; + } + + @override + Stream watchLazy({bool fireImmediately = false}) { + JsFunction? 
stop; + final controller = StreamController( + onCancel: () { + stop?.apply([]); + }, + ); + + final Null Function() callback = allowInterop(() { + controller.add(null); + }); + stop = col.native.watchQueryLazy(queryJs, callback); + + return controller.stream; + } + + @override + Future exportJsonRaw(R Function(Uint8List) callback) async { + return col.isar.getTxn(false, (IsarTxnJs txn) async { + final result = await queryJs.findAll(txn).wait(); + final jsonStr = stringify(result); + return callback(const Utf8Encoder().convert(jsonStr)); + }); + } + + @override + Future>> exportJson() { + return col.isar.getTxn(false, (IsarTxnJs txn) async { + final result = await queryJs.findAll(txn).wait>(); + return result.map((e) => jsMapToDart(e as Object)).toList(); + }); + } + + @override + R exportJsonRawSync(R Function(Uint8List) callback) => unsupportedOnWeb(); + + @override + List> exportJsonSync({bool primitiveNull = true}) => + unsupportedOnWeb(); +} diff --git a/lib/src/web/split_words.dart b/lib/src/web/split_words.dart new file mode 100644 index 0000000..fa29ddd --- /dev/null +++ b/lib/src/web/split_words.dart @@ -0,0 +1,5 @@ +// ignore_for_file: public_member_api_docs + +import 'package:isar/src/web/isar_web.dart'; + +List isarSplitWords(String input) => unsupportedOnWeb(); diff --git a/pubspec.yaml b/pubspec.yaml new file mode 100644 index 0000000..e3c0ba5 --- /dev/null +++ b/pubspec.yaml @@ -0,0 +1,22 @@ +name: isar +description: Extremely fast, easy to use, and fully async NoSQL database for Flutter. 
+version: 3.1.0+1 +repository: https://github.com/isar/isar/tree/main/packages/isar +homepage: https://github.com/isar/isar +issue_tracker: https://github.com/isar/isar/issues +documentation: https://isar.dev +funding: + - https://github.com/sponsors/leisim/ + +environment: + sdk: ">=2.17.0 <3.0.0" + +dependencies: + ffi: ">=2.0.0 <3.0.0" + js: ^0.6.4 + meta: ^1.7.0 + +dev_dependencies: + ffigen: ">=6.1.2 <8.0.0" + test: ^1.21.1 + very_good_analysis: ^3.0.1 diff --git a/test/isar_reader_writer_test.dart b/test/isar_reader_writer_test.dart new file mode 100644 index 0000000..5fc83de --- /dev/null +++ b/test/isar_reader_writer_test.dart @@ -0,0 +1,287 @@ +@TestOn('vm') + +// ignore_for_file: constant_identifier_names + +import 'dart:convert'; +import 'dart:io'; +import 'dart:typed_data'; + +import 'package:isar/isar.dart'; +import 'package:isar/src/native/isar_core.dart'; +import 'package:isar/src/native/isar_reader_impl.dart'; +import 'package:isar/src/native/isar_writer_impl.dart'; +import 'package:test/test.dart'; + +void main() { + group('Golden Binary', () { + late final json = + File('../isar_core/tests/binary_golden.json').readAsStringSync(); + late final tests = (jsonDecode(json) as List) + .map((e) => BinaryTest.fromJson(e as Map)) + .toList(); + + test('IsarReader', () { + var t = 0; + for (final test in tests) { + final reader = IsarReaderImpl(Uint8List.fromList(test.bytes)); + var offset = 2; + for (var i = 0; i < test.types.length; i++) { + final type = test.types[i]; + final nullableValue = type.read(reader, offset, true); + expect(nullableValue, test.values[i], reason: '${test.types} $t'); + + final nonNullableValue = type.read(reader, offset, false); + _expectIgnoreNull(nonNullableValue, test.values[i], type); + offset += type.size; + } + t++; + } + }); + + test('IsarWriter', () { + for (final test in tests) { + final buffer = Uint8List(10000); + final size = + test.types.fold(0, (sum, type) => sum + type.size) + 2; + + final bufferView = 
buffer.buffer.asUint8List(0, test.bytes.length); + final writer = IsarWriterImpl(bufferView, size); + var offset = 2; + for (var i = 0; i < test.types.length; i++) { + final type = test.types[i]; + final value = test.values[i]; + type.write(writer, offset, value); + offset += type.size; + } + + expect(buffer.sublist(0, test.bytes.length), test.bytes); + } + }); + }); +} + +enum Type { + Bool(1, false, _readBool, _writeBool), + Byte(1, 0, _readByte, _writeByte), + Int(4, nullInt, _readInt, _writeInt), + Float(4, nullFloat, _readFloat, _writeFloat), + Long(8, nullLong, _readLong, _writeLong), + Double(8, nullDouble, _readDouble, _writeDouble), + String(3, '', _readString, _writeString), + BoolList(3, false, _readBoolList, _writeBoolList), + ByteList(3, 0, _readByteList, _writeByteList), + IntList(3, nullInt, _readIntList, _writeIntList), + FloatList(3, nullFloat, _readFloatList, _writeFloatList), + LongList(3, nullLong, _readLongList, _writeLongList), + DoubleList(3, nullDouble, _readDoubleList, _writeDoubleList), + StringList(3, '', _readStringList, _writeStringList); + + const Type(this.size, this.nullValue, this.read, this.write); + + final int size; + final dynamic nullValue; + final dynamic Function(IsarReader reader, int offset, bool nullable) read; + final void Function(IsarWriter reader, int offset, dynamic value) write; +} + +class BinaryTest { + const BinaryTest(this.types, this.values, this.bytes); + + factory BinaryTest.fromJson(Map json) { + return BinaryTest( + (json['types'] as List) + .map((type) => Type.values.firstWhere((t) => t.name == type)) + .toList(), + json['values'] as List, + (json['bytes'] as List).cast(), + ); + } + + final List types; + final List values; + final List bytes; +} + +void _expectIgnoreNull( + dynamic left, + dynamic right, + Type type, { + bool inList = false, +}) { + if (right == null && (type.index < Type.BoolList.index || inList)) { + if (left is double) { + expect(left, isNaN); + } else { + expect(left, type.nullValue); 
+ } + } else if (right is List) { + left as List; + for (var i = 0; i < right.length; i++) { + _expectIgnoreNull(left[i], right[i], type, inList: true); + } + } else { + expect(left, right); + } +} + +bool? _readBool(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readBoolOrNull(offset); + } else { + return reader.readBool(offset); + } +} + +void _writeBool(IsarWriter writer, int offset, dynamic value) { + writer.writeBool(offset, value as bool?); +} + +int? _readByte(IsarReader reader, int offset, bool nullable) { + return reader.readByte(offset); +} + +void _writeByte(IsarWriter writer, int offset, dynamic value) { + writer.writeByte(offset, value as int); +} + +int? _readInt(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readIntOrNull(offset); + } else { + return reader.readInt(offset); + } +} + +void _writeInt(IsarWriter writer, int offset, dynamic value) { + writer.writeInt(offset, value as int?); +} + +double? _readFloat(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readFloatOrNull(offset); + } else { + return reader.readFloat(offset); + } +} + +void _writeFloat(IsarWriter writer, int offset, dynamic value) { + writer.writeFloat(offset, value as double?); +} + +int? _readLong(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readLongOrNull(offset); + } else { + return reader.readLong(offset); + } +} + +void _writeLong(IsarWriter writer, int offset, dynamic value) { + writer.writeLong(offset, value as int?); +} + +double? _readDouble(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readDoubleOrNull(offset); + } else { + return reader.readDouble(offset); + } +} + +void _writeDouble(IsarWriter writer, int offset, dynamic value) { + writer.writeDouble(offset, value as double?); +} + +String? 
_readString(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readStringOrNull(offset); + } else { + return reader.readString(offset); + } +} + +void _writeString(IsarWriter writer, int offset, dynamic value) { + final bytes = value is String ? utf8.encode(value) as Uint8List : null; + writer.writeByteList(offset, bytes); +} + +List? _readBoolList(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readBoolOrNullList(offset); + } else { + return reader.readBoolList(offset); + } +} + +void _writeBoolList(IsarWriter writer, int offset, dynamic value) { + writer.writeBoolList(offset, (value as List?)?.cast()); +} + +List? _readByteList(IsarReader reader, int offset, bool nullable) { + return reader.readByteList(offset); +} + +void _writeByteList(IsarWriter writer, int offset, dynamic value) { + final bytes = value is List ? Uint8List.fromList(value.cast()) : null; + writer.writeByteList(offset, bytes); +} + +List? _readIntList(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readIntOrNullList(offset); + } else { + return reader.readIntList(offset); + } +} + +void _writeIntList(IsarWriter writer, int offset, dynamic value) { + writer.writeIntList(offset, (value as List?)?.cast()); +} + +List? _readFloatList(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readFloatOrNullList(offset); + } else { + return reader.readFloatList(offset); + } +} + +void _writeFloatList(IsarWriter writer, int offset, dynamic value) { + writer.writeFloatList(offset, (value as List?)?.cast()); +} + +List? _readLongList(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readLongOrNullList(offset); + } else { + return reader.readLongList(offset); + } +} + +void _writeLongList(IsarWriter writer, int offset, dynamic value) { + writer.writeLongList(offset, (value as List?)?.cast()); +} + +List? 
_readDoubleList(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readDoubleOrNullList(offset); + } else { + return reader.readDoubleList(offset); + } +} + +void _writeDoubleList(IsarWriter writer, int offset, dynamic value) { + writer.writeDoubleList(offset, (value as List?)?.cast()); +} + +List? _readStringList(IsarReader reader, int offset, bool nullable) { + if (nullable) { + return reader.readStringOrNullList(offset); + } else { + return reader.readStringList(offset); + } +} + +void _writeStringList(IsarWriter writer, int offset, dynamic value) { + writer.writeStringList(offset, (value as List?)?.cast()); +} diff --git a/tool/get_version.dart b/tool/get_version.dart new file mode 100644 index 0000000..cd12a90 --- /dev/null +++ b/tool/get_version.dart @@ -0,0 +1,6 @@ +import 'package:isar/isar.dart'; + +void main() { + // ignore: avoid_print + print(Isar.version); +} diff --git a/tool/verify_release_version.dart b/tool/verify_release_version.dart new file mode 100644 index 0000000..1ad3e91 --- /dev/null +++ b/tool/verify_release_version.dart @@ -0,0 +1,9 @@ +import 'package:isar/isar.dart'; + +void main(List args) { + if (Isar.version != args[0]) { + throw StateError( + 'Invalid Isar version for release: ${Isar.version} != ${args[0]}', + ); + } +}