From a6f4423bf0556b5e06910c5ed13100bfcf76be54 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Fri, 29 Nov 2024 15:50:33 +0200
Subject: [PATCH 01/26] Added Common package, tests, and a CLI demo.

---
 .gitignore                                    |   60 +
 Directory.build.props                         |    6 +
 IsExternalInit.cs                             |    7 +
 LICENSE                                       |  177 +++
 .../Connection/IPowerSyncBackendConnector.cs  |   27 +
 .../Client/Connection/PowerSyncCredentials.cs |    7 +
 .../Client/PowerSyncDatabase.cs               |  718 +++++++++++
 .../PowerSync.Common/Client/SQLOpenFactory.cs |   24 +
 .../Sync/Bucket/BucketStorageAdapter.cs       |  116 ++
 .../Client/Sync/Bucket/OpType.cs              |   34 +
 .../Client/Sync/Bucket/OplogEntry.cs          |   75 ++
 .../Client/Sync/Bucket/SqliteBucketStorage.cs |  437 +++++++
 .../Client/Sync/Bucket/SyncDataBatch.cs       |    6 +
 .../Client/Sync/Bucket/SyncDataBucket.cs      |   74 ++
 .../Client/Sync/Stream/Remote.cs              |  187 +++
 .../Stream/StreamingSyncImplementation.cs     |  803 +++++++++++++
 .../Client/Sync/Stream/StreamingSyncTypes.cs  |  149 +++
 .../PowerSync.Common/DB/Crud/CrudBatch.cs     |   16 +
 .../PowerSync.Common/DB/Crud/CrudEntry.cs     |  113 ++
 .../DB/Crud/CrudTransaction.cs                |    9 +
 .../PowerSync.Common/DB/Crud/SyncStatus.cs    |   85 ++
 .../DB/Crud/UploadQueueStatus.cs              |   20 +
 PowerSync/PowerSync.Common/DB/IDBAdapter.cs   |  167 +++
 .../PowerSync.Common/DB/Schema/Column.cs      |   34 +
 PowerSync/PowerSync.Common/DB/Schema/Index.cs |   25 +
 .../DB/Schema/IndexedColumn.cs                |   31 +
 .../PowerSync.Common/DB/Schema/Schema.cs      |   27 +
 PowerSync/PowerSync.Common/DB/Schema/Table.cs |   67 ++
 .../MDSQLite/MDSQLiteAdapter.cs               |  325 +++++
 .../MDSQLite/MDSQLiteConnection.cs            |  222 ++++
 .../MDSQLite/MDSQLiteDBOpenFactory.cs         |   28 +
 .../MDSQLite/MDSQLiteOptions.cs               |  137 +++
 .../PowerSync.Common/PowerSync.Common.csproj  |   27 +
 PowerSync/PowerSync.Common/README.md          |   48 +
 .../PowerSync.Common/Utils/EventStream.cs     |  159 +++
 .../Utils/PowerSyncPathResolver.cs            |   59 +
 README.md                                     |   57 +
 Tests/PowerSync/.DS_Store                     |  Bin 0 -> 6148 bytes
 .../BucketStorageTests.cs                     | 1049 +++++++++++++++++
 .../EventStreamTests.cs                       |  114 ++
 .../PowerSync.Common.Tests.csproj             |   34 +
 .../PowerSyncCredentialsTests.cs              |  143 +++
 Tools/Setup/Setup.cs                          |  130 ++
 Tools/Setup/Setup.csproj                      |    7 +
 demos/Command-Line/CLI/AppSchema.cs           |   33 +
 demos/Command-Line/CLI/CLI.csproj             |   25 +
 demos/Command-Line/CLI/Demo.cs                |  106 ++
 demos/Command-Line/CLI/NodeConnector.cs       |  125 ++
 demos/Command-Line/CLI/README.md              |   24 +
 demos/Command-Line/CLI/user_id.txt            |    1 +
 root.sln                                      |   51 +
 51 files changed, 6405 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 Directory.build.props
 create mode 100644 IsExternalInit.cs
 create mode 100644 LICENSE
 create mode 100644 PowerSync/PowerSync.Common/Client/Connection/IPowerSyncBackendConnector.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Connection/PowerSyncCredentials.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Bucket/OpType.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Bucket/OplogEntry.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBatch.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBucket.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
 create mode 100644 PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Crud/CrudBatch.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Crud/CrudTransaction.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Crud/UploadQueueStatus.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/IDBAdapter.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Schema/Column.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Schema/Index.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Schema/Schema.cs
 create mode 100644 PowerSync/PowerSync.Common/DB/Schema/Table.cs
 create mode 100644 PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
 create mode 100644 PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
 create mode 100644 PowerSync/PowerSync.Common/MDSQLite/MDSQLiteDBOpenFactory.cs
 create mode 100644 PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs
 create mode 100644 PowerSync/PowerSync.Common/PowerSync.Common.csproj
 create mode 100644 PowerSync/PowerSync.Common/README.md
 create mode 100644 PowerSync/PowerSync.Common/Utils/EventStream.cs
 create mode 100644 PowerSync/PowerSync.Common/Utils/PowerSyncPathResolver.cs
 create mode 100644 README.md
 create mode 100644 Tests/PowerSync/.DS_Store
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/BucketStorageTests.cs
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs
 create mode 100644 Tools/Setup/Setup.cs
 create mode 100644 Tools/Setup/Setup.csproj
 create mode 100644 demos/Command-Line/CLI/AppSchema.cs
 create mode 100644 demos/Command-Line/CLI/CLI.csproj
 create mode 100644 demos/Command-Line/CLI/Demo.cs
 create mode 100644 demos/Command-Line/CLI/NodeConnector.cs
 create mode 100644 demos/Command-Line/CLI/README.md
 create mode 100644 demos/Command-Line/CLI/user_id.txt
 create mode 100644 root.sln

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..313bbdc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,60 @@
+# Build directories
+bin/
+obj/
+
+# User-specific files
+*.user
+*.userosscache
+*.suo
+*.sln.docstates
+*.userprefs
+*.rsuser
+
+# Logs and debug output
+*.log
+*.tlog
+*.dmp
+
+# Visual Studio specific
+.vscode/
+.vs/
+
+# Rider specific
+.idea/
+*.sln.iml
+
+# Build results
+*.dll
+*.exe
+*.pdb
+*.cache
+*.ilk
+*.lib
+*.obj
+*.sbr
+*.swp
+*.sln.ide
+*.tlog
+*.lastbuildstate
+*.idb
+
+# NuGet packages
+*.nupkg
+*.snupkg
+packages/
+.nuget/
+
+# Others
+TestResults/
+*.VSIXProjectLaunch
+.project
+.vsconfig
+
+*.db
+*.db-shm
+*.db-wal
+
+# Core binaries - add rules after ps extension downloading is supported
+*.dylib
+*.dll
+*.so
\ No newline at end of file
diff --git a/Directory.build.props b/Directory.build.props
new file mode 100644
index 0000000..5b4d6fa
--- /dev/null
+++ b/Directory.build.props
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project>
+    <ItemGroup>
+        <Compile Include="$(MSBuildThisFileDirectory)IsExternalInit.cs" Visible="false" />
+    </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/IsExternalInit.cs b/IsExternalInit.cs
new file mode 100644
index 0000000..3cd1a48
--- /dev/null
+++ b/IsExternalInit.cs
@@ -0,0 +1,7 @@
+using System.ComponentModel;
+
+namespace System.Runtime.CompilerServices
+{
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    internal class IsExternalInit { }
+}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..f433b1a
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,177 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
diff --git a/PowerSync/PowerSync.Common/Client/Connection/IPowerSyncBackendConnector.cs b/PowerSync/PowerSync.Common/Client/Connection/IPowerSyncBackendConnector.cs
new file mode 100644
index 0000000..a8fdb65
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Connection/IPowerSyncBackendConnector.cs
@@ -0,0 +1,27 @@
+namespace PowerSync.Common.Client.Connection;
+
+public interface IPowerSyncBackendConnector
+{
+    /// <summary> 
+    /// Allows the PowerSync client to retrieve an authentication token from your backend
+    /// which is used to authenticate against the PowerSync service.
+    /// <para /> 
+    /// This should always fetch a fresh set of credentials - don't use cached
+    /// values.
+    /// <para /> 
+    /// Return null if the user is not signed in. Throw an error if credentials
+    /// cannot be fetched due to a network error or other temporary error.
+    ///
+    /// This token is kept for the duration of a sync connection.
+    /// </summary>
+    Task<PowerSyncCredentials?> FetchCredentials();
+
+    /// <summary> 
+    /// Upload local changes to the app backend.
+    ///
+    /// Use <see cref="IPowerSyncDatabase.GetCrudBatch" /> to get a batch of changes to upload.
+    ///
+    /// Any thrown errors will result in a retry after the configured wait period (default: 5 seconds).
+    /// </summary>
+    Task UploadData(IPowerSyncDatabase database);
+}
diff --git a/PowerSync/PowerSync.Common/Client/Connection/PowerSyncCredentials.cs b/PowerSync/PowerSync.Common/Client/Connection/PowerSyncCredentials.cs
new file mode 100644
index 0000000..4730603
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Connection/PowerSyncCredentials.cs
@@ -0,0 +1,7 @@
+namespace PowerSync.Common.Client.Connection;
+public class PowerSyncCredentials(string endpoint, string token, DateTime? expiresAt = null)
+{
+    public string Endpoint { get; set; } = endpoint;
+    public string Token { get; set; } = token;
+    public DateTime? ExpiresAt { get; set; } = expiresAt;
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
new file mode 100644
index 0000000..ceb6d0c
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
@@ -0,0 +1,718 @@
+namespace PowerSync.Common.Client;
+
+using System.Text.RegularExpressions;
+using System.Threading.Tasks;
+
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+
+using Newtonsoft.Json;
+
+using PowerSync.Common.Client.Connection;
+using PowerSync.Common.Client.Sync.Bucket;
+using PowerSync.Common.Client.Sync.Stream;
+using PowerSync.Common.DB;
+using PowerSync.Common.DB.Crud;
+using PowerSync.Common.DB.Schema;
+using PowerSync.Common.MDSQLite;
+using PowerSync.Common.Utils;
+
+public class BasePowerSyncDatabaseOptions()
+{
+    /// <summary>
+    /// Schema used for the local database.
+    /// </summary>
+    public Schema Schema { get; set; } = null!;
+
+    public ILogger? Logger { get; set; } = null!;
+
+}
+
+public abstract class DatabaseSource { }
+
+public class DBAdapterSource(IDBAdapter Adapter) : DatabaseSource
+{
+    public IDBAdapter Adapter { get; init; } = Adapter;
+}
+
+public class OpenFactorySource(ISQLOpenFactory Factory) : DatabaseSource
+{
+    public ISQLOpenFactory Factory { get; init; } = Factory;
+}
+
+
+public class PowerSyncDatabaseOptions() : BasePowerSyncDatabaseOptions()
+{
+    /// <summary> 
+    /// Source for a SQLite database connection.
+    /// </summary>
+    public DatabaseSource Database { get; set; } = null!;
+
+}
+
+public class PowerSyncDBEvent : StreamingSyncImplementationEvent
+{
+    public bool? Initialized { get; set; }
+    public Schema? SchemaChanged { get; set; }
+}
+
+public interface IPowerSyncDatabase : IEventStream<PowerSyncDBEvent>
+{
+    public Task Connect(IPowerSyncBackendConnector connector, PowerSyncConnectionOptions? options = null);
+    public Task<string> GetClientId();
+
+    public Task<CrudBatch?> GetCrudBatch(int limit);
+
+    public Task<CrudTransaction?> GetNextCrudTransaction();
+}
+
+public class PowerSyncDatabase : EventStream<PowerSyncDBEvent>, IPowerSyncDatabase
+{
+
+    public IDBAdapter Database;
+    private Schema schema;
+
+    private static readonly int DEFAULT_WATCH_THROTTLE_MS = 30;
+    private static readonly Regex POWERSYNC_TABLE_MATCH = new Regex(@"(^ps_data__|^ps_data_local__)", RegexOptions.Compiled);
+
+    public bool Closed;
+    public bool Ready;
+
+    protected Task isReadyTask;
+
+    private StreamingSyncImplementation? syncStreamImplementation;
+    public string SdkVersion;
+
+    protected IBucketStorageAdapter BucketStorageAdapter;
+
+    protected CancellationTokenSource? syncStreamStatusCts;
+
+    protected SyncStatus CurrentStatus;
+
+    public ILogger Logger;
+
+    public PowerSyncDatabase(PowerSyncDatabaseOptions options)
+    {
+        if (options.Database is DBAdapterSource adapterSource)
+        {
+            Database = adapterSource.Adapter;
+        }
+        else if (options.Database is OpenFactorySource factorySource)
+        {
+            Database = factorySource.Factory.OpenDatabase();
+        }
+        else if (options.Database is SQLOpenOptions openOptions)
+        {
+            // TODO default to MDSQLite factory for now
+            // Can be broken out, rename this class to Abstract
+            // `this.openDBAdapter(options)`
+            Database = new MDSQLiteAdapter(new MDSQLiteAdapterOptions
+            {
+                Name = openOptions.DbFilename,
+                SqliteOptions = null
+            });
+        }
+        else
+        {
+            throw new ArgumentException("The provided `Database` option is invalid.");
+        }
+        Logger = options.Logger ?? NullLogger.Instance;
+        CurrentStatus = new SyncStatus(new SyncStatusOptions());
+        BucketStorageAdapter = generateBucketStorageAdapter();
+
+        Closed = false;
+        Ready = false;
+
+        schema = options.Schema;
+        SdkVersion = "";
+        isReadyTask = Initialize();
+    }
+
+    protected IBucketStorageAdapter generateBucketStorageAdapter()
+    {
+        return new SqliteBucketStorage(Database, Logger);
+    }
+
+    /// <summary>
+    /// Resolves once initialization is completed.
+    /// </summary>
+    public async Task WaitForReady()
+    {
+        if (Ready)
+        {
+            return;
+        }
+
+        await isReadyTask;
+    }
+
+    public async Task WaitForFirstSync(CancellationToken? cancellationToken = null)
+    {
+        if (CurrentStatus.HasSynced == true)
+        {
+            return;
+        }
+
+        var tcs = new TaskCompletionSource<bool>();
+        var cts = new CancellationTokenSource();
+
+        var _ = Task.Run(() =>
+        {
+            foreach (var update in Listen(cts.Token))
+            {
+                if (update.StatusChanged?.HasSynced == true)
+                {
+                    cts.Cancel();
+                    tcs.SetResult(true);
+                }
+            }
+        });
+
+        cancellationToken?.Register(() =>
+        {
+            cts.Cancel();
+            tcs.SetCanceled();
+        });
+
+        await tcs.Task;
+    }
+
+    protected async Task Initialize()
+    {
+        await BucketStorageAdapter.Init();
+        await LoadVersion();
+        await UpdateSchema(schema);
+        await UpdateHasSynced();
+        await Database.Execute("PRAGMA RECURSIVE_TRIGGERS=TRUE");
+        Ready = true;
+        Emit(new PowerSyncDBEvent { Initialized = true });
+    }
+
+    private record VersionResult(string version);
+
+    private async Task LoadVersion()
+    {
+        string sdkVersion = (await Database.Get<VersionResult>("SELECT powersync_rs_version() as version")).version;
+        SdkVersion = sdkVersion;
+
+        int[] versionInts;
+        try
+        {
+            versionInts = [.. sdkVersion
+                .Split(['.', '/'], StringSplitOptions.RemoveEmptyEntries)
+                .Take(3)
+                .Select(n => int.Parse(n))];
+        }
+        catch (Exception e)
+        {
+            throw new Exception(
+                $"Unsupported PowerSync extension version. Need >=0.2.0 <1.0.0, got: {sdkVersion}. Details: {e.Message}"
+            );
+        }
+
+        // Validate version is >= 0.2.0 and < 1.0.0
+        if (versionInts[0] != 0 || versionInts[1] < 2 || versionInts[2] < 0)
+        {
+            throw new Exception($"Unsupported PowerSync extension version. Need >=0.2.0 <1.0.0, got: {sdkVersion}");
+        }
+    }
+
+    private record LastSyncedResult(string? synced_at);
+
+    protected async Task UpdateHasSynced()
+    {
+        var result = await Database.Get<LastSyncedResult>("SELECT powersync_last_synced_at() as synced_at");
+
+        var hasSynced = result.synced_at != null;
+        DateTime? syncedAt = result.synced_at != null ? DateTime.Parse(result.synced_at + "Z") : null;
+
+        if (hasSynced != CurrentStatus.HasSynced)
+        {
+            CurrentStatus = new SyncStatus(new SyncStatusOptions(CurrentStatus.Options)
+            {
+                HasSynced = hasSynced,
+                LastSyncedAt = syncedAt
+            });
+
+            Emit(new PowerSyncDBEvent { StatusChanged = CurrentStatus });
+        }
+    }
+
+    /// <summary> 
+    /// Replace the schema with a new version. This is for advanced use cases - typically the schema should just be specified once in the constructor.
+    /// Cannot be used while connected - this should only be called before <see cref="Connect"/>.
+    /// </summary>
+    public async Task UpdateSchema(Schema schema)
+    {
+        if (syncStreamImplementation != null)
+        {
+            throw new Exception("Cannot update schema while connected");
+        }
+
+        try
+        {
+            // schema.Validate();
+        }
+        catch (Exception ex)
+        {
+            Logger.LogWarning("Schema validation failed. Unexpected behavior could occur: {Exception}", ex);
+        }
+
+        this.schema = schema;
+        await Database.Execute("SELECT powersync_replace_schema(?)", [schema.ToJSON()]);
+        await Database.RefreshSchema();
+        Emit(new PowerSyncDBEvent { SchemaChanged = schema });
+    }
+
+    /// <summary>
+    /// Wait for initialization to complete.
+    /// While initializing is automatic, this helps to catch and report initialization errors.
+    /// </summary>
+    public async Task Init()
+    {
+        await WaitForReady();
+    }
+
+    private RequiredAdditionalConnectionOptions resolveConnectionOptions(PowerSyncConnectionOptions? options)
+    {
+        var defaults = RequiredAdditionalConnectionOptions.DEFAULT_ADDITIONAL_CONNECTION_OPTIONS;
+        return new RequiredAdditionalConnectionOptions
+        {
+            RetryDelayMs = options?.RetryDelayMs ?? defaults.RetryDelayMs,
+            CrudUploadThrottleMs = options?.CrudUploadThrottleMs ?? defaults.CrudUploadThrottleMs,
+        };
+    }
+
+    public async Task Connect(IPowerSyncBackendConnector connector, PowerSyncConnectionOptions? options = null)
+    {
+        await WaitForReady();
+
+        // close connection if one is open
+        await Disconnect();
+        if (Closed)
+        {
+            throw new Exception("Cannot connect using a closed client");
+        }
+
+        var resolvedOptions = resolveConnectionOptions(options);
+        syncStreamImplementation = new StreamingSyncImplementation(new StreamingSyncImplementationOptions
+        {
+            Adapter = BucketStorageAdapter,
+            Remote = new Remote(connector),
+            UploadCrud = async () =>
+            {
+                await WaitForReady();
+                await connector.UploadData(this);
+            },
+            RetryDelayMs = resolvedOptions.RetryDelayMs,
+            CrudUploadThrottleMs = resolvedOptions.CrudUploadThrottleMs,
+            Logger = Logger
+        });
+
+        syncStreamStatusCts = new CancellationTokenSource();
+        var _ = Task.Run(() =>
+        {
+            foreach (var update in syncStreamImplementation.Listen(syncStreamStatusCts.Token))
+            {
+                if (update.StatusChanged != null)
+                {
+                    CurrentStatus = new SyncStatus(new SyncStatusOptions(update.StatusChanged.Options)
+                    {
+                        HasSynced = CurrentStatus?.HasSynced == true || update.StatusChanged.LastSyncedAt != null,
+                    });
+                    Emit(new PowerSyncDBEvent { StatusChanged = CurrentStatus });
+                }
+            }
+        });
+
+        await syncStreamImplementation.WaitForReady();
+        syncStreamImplementation.TriggerCrudUpload();
+        await syncStreamImplementation.Connect(options);
+    }
+
+    public async Task Disconnect()
+    {
+        await WaitForReady();
+        if (syncStreamImplementation != null)
+        {
+            await syncStreamImplementation.Disconnect();
+            syncStreamImplementation.Close();
+            syncStreamImplementation = null;
+        }
+        syncStreamStatusCts?.Cancel();
+    }
+
+    public async Task DisconnectAndClear()
+    {
+        await Disconnect();
+        await WaitForReady();
+
+        // TODO CL bool clearLocal = options?.ClearLocal ?? false;
+        bool clearLocal = true;
+
+        await Database.WriteTransaction(async tx =>
+        {
+            await tx.Execute("SELECT powersync_clear(?)", [clearLocal ? 1 : 0]);
+        });
+
+        // The data has been deleted - reset the sync status
+        CurrentStatus = new SyncStatus(new SyncStatusOptions());
+        Emit(new PowerSyncDBEvent { StatusChanged = CurrentStatus });
+    }
+
    /// <summary>
    /// Closes the database, the sync stream and the bucket storage adapter.
    /// <para />
    /// NOTE(review): <c>base.Close()</c> runs before <see cref="WaitForReady"/>
    /// completes; confirm the base event stream may be torn down ahead of
    /// initialization finishing, and that skipping <see cref="Disconnect"/> here
    /// (see the TODO below) is intentional.
    /// </summary>
    public new async Task Close()
    {
        base.Close();
        await WaitForReady();

        // TODO CL
        // if (options.Disconnect)
        // {
        //     await Disconnect();
        // }

        syncStreamImplementation?.Close();
        BucketStorageAdapter?.Close();

        Database.Close();
        Closed = true;
    }
+
+    /// <summary>
+    /// Get a batch of crud data to upload.
+    /// <para />
+    /// Returns null if there is no data to upload.
+    /// <para />
+    /// Use this from the <see cref="IPowerSyncBackendConnector.UploadData"/> callback.
+    ///
+    /// Once the data have been successfully uploaded, call <see cref="CrudBatch.Complete"/> before
+    /// requesting the next batch.
+    /// <para />
+    /// Use <paramref name="limit"/> to specify the maximum number of updates to return in a single
+    /// batch.
+    /// <para />
+    /// This method does include transaction ids in the result, but does not group
+    /// data by transaction. One batch may contain data from multiple transactions,
+    /// and a single transaction may be split over multiple batches.
+    /// </summary>
+    public async Task<CrudBatch?> GetCrudBatch(int limit = 100)
+    {
+        var crudResult = await GetAll<CrudEntryJSON>($"SELECT id, tx_id, data FROM {PSInternalTable.CRUD} ORDER BY id ASC LIMIT ?", [limit + 1]);
+
+        var all = crudResult.Select(CrudEntry.FromRow).ToList();
+
+        var haveMore = false;
+        if (all.Count > limit)
+        {
+            all.RemoveAt(all.Count - 1);
+            haveMore = true;
+        }
+        if (all.Count == 0)
+        {
+            return null;
+        }
+
+        var last = all[all.Count - 1];
+
+        return new CrudBatch(
+            [.. all],
+            haveMore,
+            async writeCheckpoint => await HandleCrudCheckpoint(last.ClientId, writeCheckpoint)
+     );
+    }
+
+    /// <summary>
+    /// Get the next recorded transaction to upload.
+    /// <para />
+    /// Returns null if there is no data to upload.
+    ///
+    /// Use this from the <see cref="IPowerSyncBackendConnector.UploadData"/> callback.
+    /// <para />
+    /// Once the data have been successfully uploaded, call <see cref="CrudTransaction.Complete"/> before
+    /// requesting the next transaction.
+    /// <para />
+    /// Unlike <see cref="GetCrudBatch"/>, this only returns data from a single transaction at a time.
+    /// All data for the transaction is loaded into memory.
+    /// </summary>
+    public async Task<CrudTransaction?> GetNextCrudTransaction()
+    {
+        return await Database.ReadTransaction(async tx =>
+        {
+            var first = await tx.GetOptional<CrudEntryJSON>(
+            $"SELECT id, tx_id, data FROM {PSInternalTable.CRUD} ORDER BY id ASC LIMIT 1");
+
+            if (first == null)
+            {
+                return null;
+            }
+
+            long? txId = first.TransactionId ?? null;
+            List<CrudEntry> all;
+
+            if (txId == null)
+            {
+                all = [CrudEntry.FromRow(first)];
+            }
+            else
+            {
+                var result = await tx.GetAll<CrudEntryJSON>(
+                    $"SELECT id, tx_id, data FROM {PSInternalTable.CRUD} WHERE tx_id = ? ORDER BY id ASC",
+                    [txId]);
+
+                all = result.Select(CrudEntry.FromRow).ToList();
+            }
+
+            var last = all.Last();
+            return new CrudTransaction(
+                [.. all],
+                async writeCheckpoint => await HandleCrudCheckpoint(last.ClientId, writeCheckpoint),
+                txId
+            );
+        });
+    }
+
+    public async Task HandleCrudCheckpoint(long lastClientId, string? writeCheckpoint = null)
+    {
+        await Database.WriteTransaction(async (tx) =>
+        {
+            await tx.Execute($"DELETE FROM {PSInternalTable.CRUD} WHERE id <= ?", [lastClientId]);
+            if (!string.IsNullOrEmpty(writeCheckpoint))
+            {
+                var check = await tx.GetAll<object>($"SELECT 1 FROM {PSInternalTable.CRUD} LIMIT 1");
+                if (check.Length == 0)
+                {
+
+                    await tx.Execute($"UPDATE {PSInternalTable.BUCKETS} SET target_op = CAST(? as INTEGER) WHERE name='$local'", [
+                      writeCheckpoint
+                    ]);
+                }
+            }
+            else
+            {
+                await tx.Execute(
+                    $"UPDATE {PSInternalTable.BUCKETS} SET target_op = CAST(? as INTEGER) WHERE name = '$local'",
+                    [BucketStorageAdapter.GetMaxOpId()]);
+            }
+        });
+    }
+
    /// <summary>
    /// Get an unique client id for this database.
    ///
    /// The id is not reset when the database is cleared, only when the database is deleted.
    /// </summary>
    public async Task<string> GetClientId()
    {
        // Delegates to the bucket storage adapter, which owns the persisted id.
        return await BucketStorageAdapter.GetClientId();
    }
+
+    public async Task<NonQueryResult> Execute(string query, object[]? parameters = null)
+    {
+        await WaitForReady();
+        return await Database.Execute(query, parameters);
+    }
+
+    public async Task<T[]> GetAll<T>(string query, object[]? parameters = null)
+    {
+        await WaitForReady();
+        return await Database.GetAll<T>(query, parameters);
+    }
+
+    public async Task<T?> GetOptional<T>(string query, object[]? parameters = null)
+    {
+        await WaitForReady();
+        return await Database.GetOptional<T>(query, parameters);
+    }
+    public async Task<T> Get<T>(string query, object[]? parameters = null)
+    {
+        await WaitForReady();
+        return await Database.Get<T>(query, parameters);
+    }
+
+
+    /// <summary>
+    /// Executes a read query every time the source tables are modified.
+    /// <para />
+    /// Use <see cref="SQLWatchOptions.ThrottleMs"/> to specify the minimum interval between queries.
+    /// Source tables are automatically detected using <c>EXPLAIN QUERY PLAN</c>.
+    /// </summary>
+    public void Watch<T>(string query, object[]? parameters, WatchHandler<T> handler, SQLWatchOptions? options = null)
+    {
+        Task.Run(async () =>
+        {
+            try
+            {
+                var resolvedTables = await ResolveTables(query, parameters, options);
+                var result = await GetAll<T>(query, parameters);
+                handler.OnResult(result);
+
+                OnChange(new WatchOnChangeHandler
+                {
+                    OnChange = async (change) =>
+                    {
+                        try
+                        {
+                            var result = await GetAll<T>(query, parameters);
+                            handler.OnResult(result);
+                        }
+                        catch (Exception ex)
+                        {
+                            handler.OnError?.Invoke(ex);
+                        }
+                    },
+                    OnError = handler.OnError
+                }, new SQLWatchOptions
+                {
+                    Tables = resolvedTables,
+                    Signal = options?.Signal,
+                    ThrottleMs = options?.ThrottleMs
+                });
+            }
+            catch (Exception ex)
+            {
+                handler.OnError?.Invoke(ex);
+            }
+        });
+    }
+
+    private record ExplainedResult(string opcode, int p2, int p3);
+    private record TableSelectResult(string tbl_name);
+    public async Task<string[]> ResolveTables(string sql, object[]? parameters = null, SQLWatchOptions? options = null)
+    {
+        List<string> resolvedTables = options?.Tables != null ? [.. options.Tables] : [];
+
+        if (options?.Tables == null)
+        {
+            var explained = await GetAll<ExplainedResult>(
+                $"EXPLAIN {sql}", parameters
+            );
+
+            var rootPages = explained
+                .Where(row => row.opcode == "OpenRead" && row.p3 == 0)
+                .Select(row => row.p2)
+                .ToList();
+
+
+            var tables = await GetAll<TableSelectResult>(
+                "SELECT DISTINCT tbl_name FROM sqlite_master WHERE rootpage IN (SELECT json_each.value FROM json_each(?))",
+                [JsonConvert.SerializeObject(rootPages)]
+            );
+
+            foreach (var table in tables)
+            {
+                resolvedTables.Add(POWERSYNC_TABLE_MATCH.Replace(table.tbl_name, ""));
+            }
+        }
+
+        return [.. resolvedTables];
+    }
+
+    /// <summary>
+    /// Invokes the provided callback whenever any of the specified tables are modified.
+    /// <para />
+    /// This is preferred over <see cref="Watch"/> when multiple queries need to be performed
+    /// together in response to data changes.
+    /// </summary>
+    public void OnChange(WatchOnChangeHandler handler, SQLWatchOptions? options = null)
+    {
+        var resolvedOptions = options ?? new SQLWatchOptions();
+
+        string[] tables = resolvedOptions.Tables ?? [];
+        HashSet<string> watchedTables = [.. tables.SelectMany(table => new[] { table, $"ps_data__{table}", $"ps_data_local__{table}" })];
+
+        var changedTables = new HashSet<string>();
+        var resolvedThrottleMs = resolvedOptions.ThrottleMs ?? DEFAULT_WATCH_THROTTLE_MS;
+
+        void flushTableUpdates()
+        {
+            HandleTableChanges(changedTables, watchedTables, (intersection) =>
+            {
+                if (resolvedOptions?.Signal?.IsCancellationRequested == true) return;
+                handler.OnChange(new WatchOnChangeEvent { ChangedTables = intersection });
+            });
+        }
+
+        var cts = Database.RunListener((update) =>
+        {
+            if (update.TablesUpdated != null)
+            {
+                try
+                {
+                    ProcessTableUpdates(update.TablesUpdated, changedTables);
+                    flushTableUpdates();
+                }
+                catch (Exception ex)
+                {
+                    handler?.OnError?.Invoke(ex);
+                }
+            }
+        });
+
+        if (options?.Signal.HasValue == true)
+        {
+            options.Signal.Value.Register(() =>
+            {
+                cts.Cancel();
+            });
+        }
+    }
+
+    private static void HandleTableChanges(HashSet<string> changedTables, HashSet<string> watchedTables, Action<string[]> onDetectedChanges)
+    {
+        if (changedTables.Count > 0)
+        {
+            var intersection = changedTables.Where(watchedTables.Contains).ToArray();
+            if (intersection.Length > 0)
+            {
+                onDetectedChanges(intersection);
+            }
+        }
+        changedTables.Clear();
+    }
+
+    private static void ProcessTableUpdates(INotification updateNotification, HashSet<string> changedTables)
+    {
+        string[] tables = [];
+        if (updateNotification is BatchedUpdateNotification batchedUpdate)
+        {
+            tables = batchedUpdate.Tables;
+        }
+        else if (updateNotification is UpdateNotification singleUpdate)
+        {
+            tables = [singleUpdate.Table];
+        }
+
+        foreach (var table in tables)
+        {
+            changedTables.Add(table);
+        }
+    }
+}
+
/// <summary>
/// Options controlling <c>Watch</c> / <c>OnChange</c> subscriptions.
/// </summary>
public class SQLWatchOptions
{
    // Optional token used to cancel the watch.
    public CancellationToken? Signal { get; set; }
    // Explicit tables to watch; when null they are resolved from the query plan.
    public string[]? Tables { get; set; }

    /// <summary>
    /// The minimum interval between queries in milliseconds.
    /// </summary>
    public int? ThrottleMs { get; set; }
}
+
/// <summary>
/// Callbacks for a watched query: results per run and optional error reporting.
/// </summary>
public class WatchHandler<T>
{
    // Receives each result set; must be supplied by the caller.
    public Action<T[]> OnResult { get; set; } = null!;
    // Optional callback for query or change-processing failures.
    public Action<Exception>? OnError { get; set; }
}
+
/// <summary>
/// Event payload describing which watched tables changed.
/// </summary>
public class WatchOnChangeEvent
{
    public string[] ChangedTables { get; set; } = [];
}
+
/// <summary>
/// Callbacks for <c>OnChange</c> subscriptions.
/// </summary>
public class WatchOnChangeHandler
{
    // Invoked with the set of changed tables; must be supplied by the caller.
    public Func<WatchOnChangeEvent, Task> OnChange { get; set; } = null!;
    // Optional callback for failures while processing table updates.
    public Action<Exception>? OnError { get; set; }
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs b/PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs
new file mode 100644
index 0000000..c91abdc
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs
@@ -0,0 +1,24 @@
+namespace PowerSync.Common.Client;
+
+using PowerSync.Common.DB;
+
/// <summary>
/// Options identifying the SQLite database file to open.
/// </summary>
public class SQLOpenOptions : DatabaseSource
{
    /// <summary>
    /// Filename for the database.
    /// </summary>
    public string DbFilename { get; set; } = "";

    /// <summary>
    /// Directory where the database file is located.
    /// </summary>
    public string? DbLocation { get; set; }
}
+
/// <summary>
/// Factory for opening database connection adapters.
/// </summary>
public interface ISQLOpenFactory
{
    /// <summary>
    /// Opens a connection adapter to a SQLite Database.
    /// </summary>
    IDBAdapter OpenDatabase();
}
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs
new file mode 100644
index 0000000..aa76d58
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/BucketStorageAdapter.cs
@@ -0,0 +1,116 @@
+
+namespace PowerSync.Common.Client.Sync.Bucket;
+
+using System;
+using System.Threading.Tasks;
+
+using PowerSync.Common.DB.Crud;
+using PowerSync.Common.Utils;
+using Newtonsoft.Json;
+
/// <summary>
/// A checkpoint received from the sync service: the target op id, the buckets
/// it covers, and an optional write checkpoint.
/// </summary>
public class Checkpoint
{
    [JsonProperty("last_op_id")]
    public string LastOpId { get; set; } = null!;

    [JsonProperty("buckets")]
    public BucketChecksum[] Buckets { get; set; } = [];

    [JsonProperty("write_checkpoint")]
    public string? WriteCheckpoint { get; set; } = null;
}
+
/// <summary>
/// The stored position of a single bucket. The JSON property names match the
/// column aliases used by the GetBucketStates query.
/// </summary>
public class BucketState
{
    [JsonProperty("bucket")]
    public string Bucket { get; set; } = null!;

    [JsonProperty("op_id")]
    public string OpId { get; set; } = null!;
}
+
/// <summary>
/// Result of attempting to apply a checkpoint to the local database.
/// </summary>
public class SyncLocalDatabaseResult
{
    /// <summary>True when the local database reached the checkpoint.</summary>
    [JsonProperty("ready")]
    public bool Ready { get; set; }

    /// <summary>False when checksum validation failed.</summary>
    [JsonProperty("checkpointValid")]
    public bool CheckpointValid { get; set; }

    /// <summary>Buckets that failed checksum validation, when any.</summary>
    [JsonProperty("checkpointFailures")]
    public string[]? CheckpointFailures { get; set; }

    public override bool Equals(object? obj)
    {
        // Structural comparison. Previously both objects were serialized to JSON
        // and the strings compared, which allocated on every call and coupled
        // equality to serializer settings; the member-wise comparison below is
        // equivalent for these three properties.
        if (obj is not SyncLocalDatabaseResult other) return false;
        if (Ready != other.Ready || CheckpointValid != other.CheckpointValid) return false;

        if (CheckpointFailures == null || other.CheckpointFailures == null)
        {
            return CheckpointFailures == null && other.CheckpointFailures == null;
        }

        if (CheckpointFailures.Length != other.CheckpointFailures.Length) return false;
        for (var i = 0; i < CheckpointFailures.Length; i++)
        {
            if (CheckpointFailures[i] != other.CheckpointFailures[i]) return false;
        }
        return true;
    }

    public override int GetHashCode()
    {
        unchecked
        {
            var hash = 17;
            hash = hash * 31 + Ready.GetHashCode();
            hash = hash * 31 + CheckpointValid.GetHashCode();
            if (CheckpointFailures != null)
            {
                foreach (var failure in CheckpointFailures)
                {
                    hash = hash * 31 + (failure?.GetHashCode() ?? 0);
                }
            }
            return hash;
        }
    }
}
+
/// <summary>
/// Per-bucket checksum information included in a <see cref="Checkpoint"/>.
/// </summary>
public class BucketChecksum
{
    [JsonProperty("bucket")]
    public string Bucket { get; set; } = null!;

    [JsonProperty("checksum")]
    public long Checksum { get; set; }

    /// <summary>
    /// Count of operations - informational only.
    /// </summary>
    [JsonProperty("count")]
    public int? Count { get; set; }
}
+
/// <summary>
/// Names of the internal tables managed by the PowerSync core extension.
/// </summary>
public static class PSInternalTable
{
    public static readonly string DATA = "ps_data";
    public static readonly string CRUD = "ps_crud";
    public static readonly string BUCKETS = "ps_buckets";
    public static readonly string OPLOG = "ps_oplog";
    public static readonly string UNTYPED = "ps_untyped";
}
+
/// <summary>
/// Event emitted by bucket storage; CrudUpdate is set when the crud queue changed.
/// </summary>
public class BucketStorageEvent
{
    public bool CrudUpdate { get; set; }
}
+
/// <summary>
/// Storage interface for synced bucket data and the local crud upload queue.
/// </summary>
public interface IBucketStorageAdapter : IEventStream<BucketStorageEvent>
{
    Task Init();

    /// <summary>Persists a batch of downloaded bucket data.</summary>
    Task SaveSyncData(SyncDataBatch batch);

    /// <summary>Deletes the given buckets and their data.</summary>
    Task RemoveBuckets(string[] buckets);
    Task SetTargetCheckpoint(Checkpoint checkpoint);

    void StartSession();

    Task<BucketState[]> GetBucketStates();

    /// <summary>Attempts to apply a checkpoint to the local database.</summary>
    Task<SyncLocalDatabaseResult> SyncLocalDatabase(Checkpoint checkpoint);

    Task<CrudEntry?> NextCrudItem();

    /// <summary>True when the crud upload queue is non-empty.</summary>
    Task<bool> HasCrud();

    /// <summary>Gets up to <paramref name="limit"/> crud entries to upload.</summary>
    Task<CrudBatch?> GetCrudBatch(int limit = 100);

    /// <summary>True once at least one full sync has completed.</summary>
    Task<bool> HasCompletedSync();

    /// <summary>
    /// Updates the '$local' bucket target using the write checkpoint supplied by
    /// <paramref name="callback"/>.
    /// </summary>
    Task<bool> UpdateLocalTarget(Func<Task<string>> callback);

    /// <summary>
    /// Exposed for tests only.
    /// </summary>
    Task AutoCompact();
    Task ForceCompact();

    string GetMaxOpId();

    /// <summary>
    /// Get a unique client ID.
    /// </summary>
    Task<string> GetClientId();
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/OpType.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/OpType.cs
new file mode 100644
index 0000000..84b7427
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/OpType.cs
@@ -0,0 +1,34 @@
+namespace PowerSync.Common.Client.Sync.Bucket;
+
+using System;
+
+using Newtonsoft.Json;
+using Newtonsoft.Json.Converters;
+
// Serialized as the enum member name (e.g. "PUT") rather than its numeric value.
[JsonConverter(typeof(StringEnumConverter))]
public enum OpTypeEnum
{
    CLEAR = 1,
    MOVE = 2,
    PUT = 3,
    REMOVE = 4
}
+
/// <summary>
/// Wrapper around <see cref="OpTypeEnum"/> providing JSON (de)serialization to
/// and from the enum member name, matching the wire format.
/// </summary>
public class OpType(OpTypeEnum value)
{
    public OpTypeEnum Value { get; } = value;

    /// <summary>
    /// Parses an op type from its JSON string form (e.g. "PUT").
    /// </summary>
    /// <exception cref="ArgumentException">The value is not a defined op type.</exception>
    public static OpType FromJSON(string jsonValue)
    {
        // Enum.TryParse also accepts arbitrary numeric strings, which would
        // silently wrap out-of-range values into an invalid enum member;
        // require a defined member so those are rejected instead.
        if (Enum.TryParse<OpTypeEnum>(jsonValue, out var enumValue) && Enum.IsDefined(typeof(OpTypeEnum), enumValue))
        {
            return new OpType(enumValue);
        }
        throw new ArgumentException($"Invalid JSON value for OpTypeEnum: {jsonValue}");
    }

    /// <summary>
    /// Returns the enum member name - equivalent to the StringEnumConverter
    /// output without the serialize-then-trim round-trip used previously.
    /// </summary>
    public string ToJSON()
    {
        return Value.ToString();
    }
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/OplogEntry.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/OplogEntry.cs
new file mode 100644
index 0000000..b2b95b3
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/OplogEntry.cs
@@ -0,0 +1,75 @@
+namespace PowerSync.Common.Client.Sync.Bucket;
+
+using Newtonsoft.Json;
+
/// <summary>
/// Wire/persistence format of a single oplog entry; converted to
/// <see cref="OplogEntry"/> via <see cref="OplogEntry.FromRow"/>.
/// </summary>
public class OplogEntryJSON
{
    [JsonProperty("checksum")]
    public long Checksum { get; set; }

    [JsonProperty("data")]
    public object? Data { get; set; }

    [JsonProperty("object_id")]
    public string? ObjectId { get; set; }

    [JsonProperty("object_type")]
    public string? ObjectType { get; set; }

    [JsonProperty("op_id")]
    public string OpId { get; set; } = null!;

    [JsonProperty("op")]
    public string Op { get; set; } = null!;

    [JsonProperty("subkey")]
    public object? Subkey { get; set; }
}
+
/// <summary>
/// A single operation in the oplog, decoded from its JSON transport form.
/// </summary>
public class OplogEntry(
    string opId,
    OpType op,
    long checksum,
    string subkey,
    string? objectType = null,
    string? objectId = null,
    object? data = null
    )
{
    public string OpId { get; private set; } = opId;
    public OpType Op { get; private set; } = op;
    public long Checksum { get; private set; } = checksum;
    public string Subkey { get; private set; } = subkey;
    public string? ObjectType { get; private set; } = objectType;
    public string? ObjectId { get; private set; } = objectId;
    public object? Data { get; private set; } = data;

    public static OplogEntry FromRow(OplogEntryJSON row)
    {
        // A non-string subkey arrives as structured JSON; re-serialize it so the
        // key is always carried as a string.
        var subkey = row.Subkey as string ?? JsonConvert.SerializeObject(row.Subkey);

        return new OplogEntry(
            row.OpId,
            OpType.FromJSON(row.Op),
            row.Checksum,
            subkey,
            row.ObjectType,
            row.ObjectId,
            row.Data
        );
    }

    public string ToJSON()
    {
        var payload = new OplogEntryJSON
        {
            OpId = OpId,
            Op = Op.ToJSON(),
            Checksum = Checksum,
            Data = Data,
            ObjectType = ObjectType,
            ObjectId = ObjectId,
            Subkey = Subkey
        };

        return JsonConvert.SerializeObject(payload, Formatting.None);
    }
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs
new file mode 100644
index 0000000..495f6e0
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SqliteBucketStorage.cs
@@ -0,0 +1,437 @@
+namespace PowerSync.Common.Client.Sync.Bucket;
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+
+using Newtonsoft.Json;
+
+using PowerSync.Common.DB;
+using PowerSync.Common.DB.Crud;
+using PowerSync.Common.Utils;
+
/// <summary>
/// SQLite-backed implementation of <see cref="IBucketStorageAdapter"/>. Applies
/// synced bucket data through the PowerSync core extension (the
/// powersync_operations table) and emits a <see cref="BucketStorageEvent"/>
/// whenever the local crud queue changes.
/// </summary>
public class SqliteBucketStorage : EventStream<BucketStorageEvent>, IBucketStorageAdapter
{
    // Largest signed 64-bit integer as text; used as the "unbounded" target op id.
    public static readonly string MAX_OP_ID = "9223372036854775807";

    private readonly IDBAdapter db;
    private bool hasCompletedSync;
    private bool pendingBucketDeletes;
    private readonly HashSet<string> tableNames;
    private string? clientId;

    // Run a compact pass after roughly this many applied operations.
    private static readonly int COMPACT_OPERATION_INTERVAL = 1000;
    private int compactCounter = COMPACT_OPERATION_INTERVAL;

    private readonly ILogger logger;

    private readonly CancellationTokenSource updateCts;

    private record ExistingTableRowsResult(string name);

    public SqliteBucketStorage(IDBAdapter db, ILogger? logger = null)
    {
        this.db = db;
        this.logger = logger ?? NullLogger.Instance;
        hasCompletedSync = false;
        pendingBucketDeletes = true;
        tableNames = [];

        updateCts = new CancellationTokenSource();

        // Forward ps_crud table updates as CrudUpdate events until Close() cancels the token.
        var _ = Task.Run(() =>
        {
            foreach (var update in db.Listen(updateCts.Token))
            {
                if (update.TablesUpdated != null)
                {
                    var tables = DBAdapterUtils.ExtractTableUpdates(update.TablesUpdated);
                    if (tables.Contains(PSInternalTable.CRUD))
                    {
                        Emit(new BucketStorageEvent { CrudUpdate = true });
                    }
                }
            }
        });
    }

    /// <summary>
    /// Loads the names of the existing ps_data_* tables.
    /// </summary>
    public async Task Init()
    {
        hasCompletedSync = false;
        var existingTableRows = await db.GetAll<ExistingTableRowsResult>("SELECT name FROM sqlite_master WHERE type='table' AND name GLOB 'ps_data_*'");

        foreach (var row in existingTableRows)
        {
            tableNames.Add(row.name);
        }
    }

    /// <summary>
    /// Stops the update listener and closes the event stream.
    /// </summary>
    public new void Close()
    {
        updateCts.Cancel();
        base.Close();
    }

    private record ClientIdResult(string? client_id);

    /// <summary>
    /// Returns the persistent client id, fetching it from the core extension on first use.
    /// </summary>
    public async Task<string> GetClientId()
    {
        if (clientId == null)
        {
            var row = await db.Get<ClientIdResult>("SELECT powersync_client_id() as client_id");
            clientId = row.client_id ?? "";
        }

        return clientId;
    }

    public string GetMaxOpId()
    {
        return MAX_OP_ID;
    }

    public void StartSession() { }

    public async Task<BucketState[]> GetBucketStates()
    {
        return
            await db.GetAll<BucketState>("SELECT name as bucket, cast(last_op as TEXT) as op_id FROM ps_buckets WHERE pending_delete = 0 AND name != '$local'");
    }

    /// <summary>
    /// Persists a batch of downloaded bucket data via the core extension's "save" operation.
    /// </summary>
    public async Task SaveSyncData(SyncDataBatch batch)
    {
        await db.WriteTransaction(async tx =>
        {
            int count = 0;
            foreach (var b in batch.Buckets)
            {
                var result = await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)",
                    ["save", JsonConvert.SerializeObject(new { buckets = new[] { b.ToJSON() } })]);
                logger.LogDebug("saveSyncData {message}", JsonConvert.SerializeObject(result));
                count += b.Data.Length;
            }
            compactCounter += count;
        });
    }

    public async Task RemoveBuckets(string[] buckets)
    {
        foreach (var bucket in buckets)
        {
            await DeleteBucket(bucket);
        }
    }

    // Marks a bucket for deletion via the core extension and flags a pending compact.
    private async Task DeleteBucket(string bucket)
    {
        await db.WriteTransaction(async tx =>
        {
            await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)",
                ["delete_bucket", bucket]);
        });

        logger.LogDebug("Done deleting bucket");
        pendingBucketDeletes = true;
    }

    private record LastSyncedResult(string? synced_at);

    public async Task<bool> HasCompletedSync()
    {
        // Once true this never reverts, so the cached flag is safe to reuse.
        if (hasCompletedSync) return true;

        var result = await db.Get<LastSyncedResult>("SELECT powersync_last_synced_at() as synced_at");

        hasCompletedSync = result.synced_at != null;
        return hasCompletedSync;
    }

    /// <summary>
    /// Attempts to apply a checkpoint to the local database: validates checksums,
    /// advances bucket positions and asks the core extension to sync local tables.
    /// </summary>
    public async Task<SyncLocalDatabaseResult> SyncLocalDatabase(Checkpoint checkpoint)
    {
        var validation = await ValidateChecksums(checkpoint);
        if (!validation.CheckpointValid)
        {
            logger.LogError("Checksums failed for {failures}", JsonConvert.SerializeObject(validation.CheckpointFailures));
            foreach (var failedBucket in validation.CheckpointFailures ?? [])
            {
                await DeleteBucket(failedBucket);
            }
            return new SyncLocalDatabaseResult
            {
                Ready = false,
                CheckpointValid = false,
                CheckpointFailures = validation.CheckpointFailures
            };
        }

        var bucketNames = checkpoint.Buckets.Select(b => b.Bucket).ToArray();
        await db.WriteTransaction(async tx =>
        {
            await tx.Execute(
                "UPDATE ps_buckets SET last_op = ? WHERE name IN (SELECT json_each.value FROM json_each(?))",
                [checkpoint.LastOpId, JsonConvert.SerializeObject(bucketNames)]
            );

            if (checkpoint.WriteCheckpoint != null)
            {
                await tx.Execute(
                    "UPDATE ps_buckets SET last_op = ? WHERE name = '$local'",
                    [checkpoint.WriteCheckpoint]
                );
            }
        });

        var valid = await UpdateObjectsFromBuckets(checkpoint);
        if (!valid)
        {
            logger.LogDebug("Not at a consistent checkpoint - cannot update local db");
            return new SyncLocalDatabaseResult
            {
                Ready = false,
                CheckpointValid = true
            };
        }

        await ForceCompact();

        return new SyncLocalDatabaseResult
        {
            Ready = true,
            CheckpointValid = true
        };
    }

    // Asks the core extension to copy bucket data into the local tables;
    // InsertId == 1 signals that a consistent checkpoint was reached.
    private async Task<bool> UpdateObjectsFromBuckets(Checkpoint checkpoint)
    {
        return await db.WriteTransaction(async tx =>
        {
            var result = await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES(?, ?)",
                                           ["sync_local", ""]);

            return result.InsertId == 1;
        });
    }

    private record ResultResult(object result);

    public class ResultDetail
    {
        [JsonProperty("valid")]
        public bool Valid { get; set; }

        [JsonProperty("failed_buckets")]
        public List<string>? FailedBuckets { get; set; }
    }

    /// <summary>
    /// Validates the checkpoint's bucket checksums via powersync_validate_checkpoint().
    /// </summary>
    public async Task<SyncLocalDatabaseResult> ValidateChecksums(
        Checkpoint checkpoint)
    {
        var result = await db.Get<ResultResult>("SELECT powersync_validate_checkpoint(?) as result",
                [JsonConvert.SerializeObject(checkpoint)]);

        logger.LogDebug("validateChecksums result item {message}", JsonConvert.SerializeObject(result));

        if (result == null) return new SyncLocalDatabaseResult { CheckpointValid = false, Ready = false };

        var resultDetail = JsonConvert.DeserializeObject<ResultDetail>(result.result.ToString() ?? "{}");

        if (resultDetail?.Valid == true)
        {
            return new SyncLocalDatabaseResult { Ready = true, CheckpointValid = true };
        }
        else
        {
            return new SyncLocalDatabaseResult
            {
                CheckpointValid = false,
                Ready = false,
                CheckpointFailures = resultDetail?.FailedBuckets?.ToArray() ?? []
            };
        }
    }

    /// <summary>
    /// Force a compact operation, primarily for testing purposes.
    /// </summary>
    public async Task ForceCompact()
    {
        compactCounter = COMPACT_OPERATION_INTERVAL;
        pendingBucketDeletes = true;

        await AutoCompact();
    }

    /// <summary>
    /// Runs pending bucket deletions and, once the operation threshold is reached,
    /// clears superseded remove operations.
    /// </summary>
    public async Task AutoCompact()
    {
        await DeletePendingBuckets();
        await ClearRemoveOps();
    }

    private async Task DeletePendingBuckets()
    {
        if (!pendingBucketDeletes) return;

        await db.WriteTransaction(async tx =>
        {
            await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES (?, ?)",
                ["delete_pending_buckets", ""]);
        });

        pendingBucketDeletes = false;
    }

    private async Task ClearRemoveOps()
    {
        if (compactCounter < COMPACT_OPERATION_INTERVAL) return;

        await db.WriteTransaction(async tx =>
        {
            await tx.Execute("INSERT INTO powersync_operations(op, data) VALUES (?, ?)",
                ["clear_remove_ops", ""]);
        });

        compactCounter = 0;
    }

    private record TargetOpResult(string target_op);
    private record SequenceResult(int seq);

    /// <summary>
    /// Updates the '$local' bucket target using the write checkpoint produced by
    /// <paramref name="callback"/>, aborting when new crud entries appeared while
    /// the checkpoint was being fetched.
    /// </summary>
    public async Task<bool> UpdateLocalTarget(Func<Task<string>> callback)
    {
        var rs1 = await db.GetAll<TargetOpResult>(
            "SELECT target_op FROM ps_buckets WHERE name = '$local' AND target_op = CAST(? as INTEGER)",
            [GetMaxOpId()]
        );

        if (rs1.Length == 0)
        {
            // Nothing to update
            return false;
        }

        var rs = await db.GetAll<SequenceResult>(
            "SELECT seq FROM sqlite_sequence WHERE name = 'ps_crud'"
        );

        if (rs.Length == 0)
        {
            // Nothing to update
            return false;
        }

        int seqBefore = rs[0].seq;
        string opId = await callback();

        logger.LogDebug("[updateLocalTarget] Updating target to checkpoint {message}", opId);

        return await db.WriteTransaction(async tx =>
        {
            var anyData = await tx.Execute("SELECT 1 FROM ps_crud LIMIT 1");
            if (anyData.RowsAffected > 0)
            {
                logger.LogDebug("[updateLocalTarget] ps crud is not empty");
                return false;
            }

            var rsAfter = await tx.GetAll<SequenceResult>(
                "SELECT seq FROM sqlite_sequence WHERE name = 'ps_crud'"
            );

            if (rsAfter.Length == 0)
            {
                throw new Exception("SQLite Sequence should not be empty");
            }

            int seqAfter = rsAfter[0].seq;
            logger.LogDebug("[updateLocalTarget] seqAfter: {seq}", seqAfter);

            if (seqAfter != seqBefore)
            {
                // A crud entry was written in between; do not move the target.
                logger.LogDebug("[updateLocalTarget] seqAfter ({seqAfter}) != seqBefore ({seqBefore})", seqAfter, seqBefore);
                return false;
            }

            var response = await tx.Execute(
               "UPDATE ps_buckets SET target_op = CAST(? as INTEGER) WHERE name='$local'",
               [opId]
           );

            logger.LogDebug("[updateLocalTarget] Response from updating target_op: {response}", JsonConvert.SerializeObject(response));
            return true;
        });
    }

    /// <summary>
    /// Get a batch of objects to send to the server.
    /// When the objects are successfully sent to the server, call .Complete().
    /// </summary>
    public async Task<CrudBatch?> GetCrudBatch(int limit = 100)
    {
        if (!await HasCrud())
        {
            return null;
        }

        var crudResult = await db.GetAll<CrudEntryJSON>("SELECT * FROM ps_crud ORDER BY id ASC LIMIT ?", [limit]);

        var all = crudResult.Select(CrudEntry.FromRow).ToArray();

        if (all.Length == 0)
        {
            return null;
        }

        var last = all[all.Length - 1];

        return new CrudBatch(
        Crud: all,
        // NOTE(review): HaveMore is hard-coded; confirm whether it should reflect
        // whether more than `limit` entries remain in ps_crud.
        HaveMore: true,
        CompleteCallback: async (string? writeCheckpoint) =>
        {
            await db.WriteTransaction(async tx =>
            {
                await tx.Execute("DELETE FROM ps_crud WHERE id <= ?", [last.ClientId]);

                if (!string.IsNullOrEmpty(writeCheckpoint))
                {
                    // Fixed: only adopt the write checkpoint once the upload queue is
                    // empty. The previous check (`crudResult?.Length > 0`) was inverted,
                    // matching neither PowerSyncDatabase.HandleCrudCheckpoint (which uses
                    // `Length == 0`) nor the other PowerSync SDKs.
                    var remaining = await tx.GetAll<object>("SELECT 1 FROM ps_crud LIMIT 1");
                    if (remaining.Length == 0)
                    {
                        await tx.Execute(
                            "UPDATE ps_buckets SET target_op = CAST(? as INTEGER) WHERE name='$local'",
                            [writeCheckpoint]);
                    }
                }
                else
                {
                    await tx.Execute(
                        "UPDATE ps_buckets SET target_op = CAST(? as INTEGER) WHERE name='$local'",
                        [GetMaxOpId()]);
                }
            });
        }
    );
    }

    public async Task<CrudEntry?> NextCrudItem()
    {
        var next = await db.GetOptional<CrudEntryJSON>("SELECT * FROM ps_crud ORDER BY id ASC LIMIT 1");

        return next != null ? CrudEntry.FromRow(next) : null;
    }

    public async Task<bool> HasCrud()
    {
        return await db.GetOptional<object>("SELECT 1 as ignore FROM ps_crud LIMIT 1") != null;
    }

    public async Task SetTargetCheckpoint(Checkpoint checkpoint)
    {
        // No Op
        await Task.CompletedTask;
    }
}
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBatch.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBatch.cs
new file mode 100644
index 0000000..be5cc99
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBatch.cs
@@ -0,0 +1,6 @@
+namespace PowerSync.Common.Client.Sync.Bucket;
+
/// <summary>
/// Groups one or more bucket payloads that are persisted together.
/// </summary>
public class SyncDataBatch
{
    public SyncDataBatch(SyncDataBucket[] buckets)
    {
        Buckets = buckets;
    }

    /// <summary>The bucket payloads contained in this batch.</summary>
    public SyncDataBucket[] Buckets { get; private set; }
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBucket.cs b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBucket.cs
new file mode 100644
index 0000000..7699778
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Bucket/SyncDataBucket.cs
@@ -0,0 +1,74 @@
+namespace PowerSync.Common.Client.Sync.Bucket;
+
+using System.Collections.Generic;
+using System.Linq;
+using Newtonsoft.Json;
+
/// <summary>
/// JSON wire representation of a single bucket of sync data.
/// </summary>
public class SyncDataBucketJSON
{
    /// <summary>Name of the bucket.</summary>
    [JsonProperty("bucket")]
    public string Bucket { get; set; } = null!;

    /// <summary>True when the server has more data for this bucket.</summary>
    [JsonProperty("has_more")]
    public bool? HasMore { get; set; }

    /// <summary>The op-id this chunk starts after.</summary>
    [JsonProperty("after")]
    public string? After { get; set; }

    /// <summary>The op-id to request the next chunk after.</summary>
    [JsonProperty("next_after")]
    public string? NextAfter { get; set; }

    /// <summary>Raw oplog entries; converted to typed entries by the consumer.</summary>
    [JsonProperty("data")]
    public List<object> Data { get; set; } = new List<object>();
}
+
/// <summary>
/// A chunk of oplog entries for a single sync bucket, as received from the sync stream.
/// </summary>
public class SyncDataBucket(
    string bucket,
    OplogEntry[] data,
    bool hasMore,
    string? after = null,
    string? nextAfter = null)
{
    public string Bucket { get; private set; } = bucket;
    public OplogEntry[] Data { get; private set; } = data;
    public bool HasMore { get; private set; } = hasMore;
    public string? After { get; private set; } = after;
    public string? NextAfter { get; private set; } = nextAfter;

    /// <summary>
    /// Builds a <see cref="SyncDataBucket"/> from its JSON wire representation.
    /// </summary>
    public static SyncDataBucket FromRow(SyncDataBucketJSON row)
    {
        OplogEntry[] entries;
        if (row.Data == null)
        {
            entries = [];
        }
        else
        {
            // Each element arrives as an untyped object graph: re-serialize it to a
            // JSON string, then deserialize into the strongly typed oplog representation.
            entries = row.Data
                .Select(obj => JsonConvert.DeserializeObject<OplogEntryJSON>(JsonConvert.SerializeObject(obj))!)
                .Select(OplogEntry.FromRow)
                .ToArray();
        }

        return new SyncDataBucket(row.Bucket, entries, row.HasMore ?? false, row.After, row.NextAfter);
    }

    /// <summary>
    /// Serializes this bucket (including its entries) back to the JSON wire format.
    /// </summary>
    public string ToJSON()
    {
        var serializedEntries = new List<object>();
        foreach (var entry in Data)
        {
            var obj = JsonConvert.DeserializeObject<object>(entry.ToJSON());
            if (obj != null)
            {
                serializedEntries.Add(obj);
            }
        }

        var row = new SyncDataBucketJSON
        {
            Bucket = Bucket,
            HasMore = HasMore,
            After = After,
            NextAfter = NextAfter,
            Data = serializedEntries
        };

        return JsonConvert.SerializeObject(row);
    }
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
new file mode 100644
index 0000000..395555c
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
@@ -0,0 +1,187 @@
+namespace PowerSync.Common.Client.Sync.Stream;
+
+using System.IO;
+using System.Net.Http;
+using System.Reflection;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+
+using PowerSync.Common.Client.Connection;
+
/// <summary>
/// Options describing a single streaming sync request.
/// </summary>
public class SyncStreamOptions
{
    /// <summary>Path (relative to the PowerSync endpoint) to post to.</summary>
    public string Path { get; set; } = "";

    /// <summary>Request body for the sync stream.</summary>
    public StreamingSyncRequest Data { get; set; } = new StreamingSyncRequest();

    /// <summary>Extra HTTP headers to send with the request.</summary>
    public Dictionary<string, string> Headers { get; set; } = new Dictionary<string, string>();

    /// <summary>Token used to abort the streaming request.</summary>
    public CancellationToken CancellationToken { get; set; } = CancellationToken.None;
}
+
/// <summary>
/// Fully resolved request target: absolute URL plus the headers to send.
/// </summary>
public class RequestDetails
{
    /// <summary>Absolute request URL.</summary>
    public string Url { get; set; } = string.Empty;

    /// <summary>Headers to attach to the request.</summary>
    public Dictionary<string, string> Headers { get; set; } = new Dictionary<string, string>();
}
+
/// <summary>
/// HTTP client for the PowerSync service: fetches credentials from the backend
/// connector and issues plain and streaming (NDJSON) requests.
/// </summary>
public class Remote
{
    // Refresh credentials this long before they actually expire, to avoid
    // racing the server with an almost-expired token.
    private const int REFRESH_CREDENTIALS_SAFETY_PERIOD_MS = 30_000;

    // Shared for all requests: allocating an HttpClient per request leaks
    // sockets under load, so every call reuses this instance.
    private readonly HttpClient httpClient;
    protected IPowerSyncBackendConnector connector;

    protected PowerSyncCredentials? credentials;

    public Remote(IPowerSyncBackendConnector connector)
    {
        httpClient = new HttpClient();
        this.connector = connector;
    }

    /// <summary>
    /// Returns the cached credentials while they are still comfortably valid,
    /// otherwise fetches fresh ones from the backend connector.
    /// </summary>
    public async Task<PowerSyncCredentials?> GetCredentials()
    {
        if (credentials?.ExpiresAt > DateTime.Now.AddMilliseconds(REFRESH_CREDENTIALS_SAFETY_PERIOD_MS))
        {
            return credentials;
        }

        credentials = await connector.FetchCredentials();

        // TODO CL trailing forward slash check
        return credentials;
    }

    /// <summary>
    /// Builds the user-agent string from the assembly's informational version,
    /// stripping any build metadata (the part after '+').
    /// </summary>
    static string GetUserAgent()
    {
        object[] attributes = Assembly.GetExecutingAssembly()
            .GetCustomAttributes(typeof(AssemblyInformationalVersionAttribute), false);

        string fullInformationalVersion = attributes.Length == 0 ? "" : ((AssemblyInformationalVersionAttribute)attributes[0]).InformationalVersion;

        // Remove the build metadata part (anything after the '+')
        int plusIndex = fullInformationalVersion.IndexOf('+');
        string version = plusIndex >= 0
            ? fullInformationalVersion.Substring(0, plusIndex)
            : fullInformationalVersion;

        return $"powersync-dotnet/{version}";
    }

    /// <summary>
    /// Performs an authenticated GET against the PowerSync endpoint and
    /// deserializes the JSON response to <typeparamref name="T"/>.
    /// </summary>
    /// <exception cref="HttpRequestException">Thrown on a non-success status code.</exception>
    public async Task<T> Get<T>(string path, Dictionary<string, string>? headers = null)
    {
        var request = await BuildRequest(HttpMethod.Get, path, data: null, additionalHeaders: headers);

        // Fixed: previously allocated a new HttpClient per call; reuse the shared instance.
        var response = await httpClient.SendAsync(request);

        if (!response.IsSuccessStatusCode)
        {
            var errorMessage = await response.Content.ReadAsStringAsync();
            throw new HttpRequestException($"Received {response.StatusCode} - {response.ReasonPhrase} when getting from {path}: {errorMessage}");
        }

        var responseData = await response.Content.ReadAsStringAsync();
        return JsonConvert.DeserializeObject<T>(responseData)!;
    }

    /// <summary>
    /// Opens a streaming POST request and yields one parsed sync line per
    /// NDJSON line received; yields null for unrecognized lines.
    /// </summary>
    /// <exception cref="HttpRequestException">Thrown on missing content or a non-success status code.</exception>
    public async IAsyncEnumerable<StreamingSyncLine?> PostStream(SyncStreamOptions options)
    {
        using var requestMessage = await BuildRequest(HttpMethod.Post, options.Path, options.Data, options.Headers);
        using var response = await httpClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, options.CancellationToken);

        if (response.Content == null)
        {
            throw new HttpRequestException($"HTTP {response.StatusCode}: No content");
        }
        else if (!response.IsSuccessStatusCode)
        {
            var errorText = await response.Content.ReadAsStringAsync();
            throw new HttpRequestException($"HTTP {response.StatusCode}: {errorText}");
        }

        var stream = await response.Content.ReadAsStreamAsync();

        // Read NDJSON stream: one JSON document per line.
        using var reader = new StreamReader(stream, Encoding.UTF8);
        string? line;

        while ((line = await reader.ReadLineAsync()) != null)
        {
            yield return ParseStreamingSyncLine(JObject.Parse(line));
        }
    }

    /// <summary>
    /// Maps a raw NDJSON object to its concrete streaming sync line type based on
    /// which discriminating key is present; returns null when none match.
    /// </summary>
    public static StreamingSyncLine? ParseStreamingSyncLine(JObject json)
    {
        // Determine the type based on available keys
        if (json.ContainsKey("checkpoint"))
        {
            return json.ToObject<StreamingSyncCheckpoint>();
        }
        else if (json.ContainsKey("checkpoint_diff"))
        {
            return json.ToObject<StreamingSyncCheckpointDiff>();
        }
        else if (json.ContainsKey("checkpoint_complete"))
        {
            return json.ToObject<StreamingSyncCheckpointComplete>();
        }
        else if (json.ContainsKey("data"))
        {
            return json.ToObject<StreamingSyncDataJSON>();
        }
        else if (json.ContainsKey("token_expires_in"))
        {
            return json.ToObject<StreamingSyncKeepalive>();
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Builds an authenticated request message for the configured endpoint.
    /// </summary>
    /// <exception cref="InvalidOperationException">No PowerSync endpoint configured.</exception>
    /// <exception cref="HttpRequestException">Not signed in (no token available).</exception>
    private async Task<HttpRequestMessage> BuildRequest(HttpMethod method, string path, object? data = null, Dictionary<string, string>? additionalHeaders = null)
    {
        var credentials = await GetCredentials();

        if (credentials == null || string.IsNullOrEmpty(credentials.Endpoint))
        {
            throw new InvalidOperationException("PowerSync endpoint not configured");
        }

        if (string.IsNullOrEmpty(credentials.Token))
        {
            // TODO CL error status code 401
            var error = new HttpRequestException("Not signed in");
            throw error;
        }

        var userAgent = GetUserAgent();

        var request = new HttpRequestMessage(method, credentials.Endpoint + path)
        {
            Content = data != null ?
                new StringContent(JsonConvert.SerializeObject(data), Encoding.UTF8, "application/json")
                : null
        };

        request.Headers.TryAddWithoutValidation("content-type", "application/json");
        request.Headers.TryAddWithoutValidation("Authorization", $"Token {credentials.Token}");
        request.Headers.TryAddWithoutValidation("x-user-agent", userAgent);

        if (additionalHeaders != null)
        {
            foreach (var header in additionalHeaders)
            {
                request.Headers.TryAddWithoutValidation(header.Key, header.Value);
            }
        }

        return request;
    }
}
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
new file mode 100644
index 0000000..ca807c4
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
@@ -0,0 +1,803 @@
+namespace PowerSync.Common.Client.Sync.Stream;
+
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+
+using Newtonsoft.Json;
+
+using PowerSync.Common.Client.Sync.Bucket;
+using PowerSync.Common.DB.Crud;
+using PowerSync.Common.Utils;
+
/// <summary>
/// Optional tuning knobs shared by the connect/stream option types.
/// </summary>
public class AdditionalConnectionOptions
{
    public AdditionalConnectionOptions(int? retryDelayMs = null, int? crudUploadThrottleMs = null)
    {
        RetryDelayMs = retryDelayMs;
        CrudUploadThrottleMs = crudUploadThrottleMs;
    }

    /// <summary>
    /// Delay for retrying sync streaming operations
    /// from the PowerSync backend after an error occurs.
    /// </summary>
    public int? RetryDelayMs { get; set; }

    /// <summary>
    /// Backend Connector CRUD operations are throttled
    /// to occur at most every `CrudUploadThrottleMs`
    /// milliseconds.
    /// </summary>
    public int? CrudUploadThrottleMs { get; set; }
}
+
/// <summary>
/// Connection options where the retry delay and CRUD upload throttle always
/// have values. The `new` members intentionally shadow the nullable properties
/// on <see cref="AdditionalConnectionOptions"/>.
/// </summary>
public class RequiredAdditionalConnectionOptions : AdditionalConnectionOptions
{
    public new int RetryDelayMs { get; set; }

    public new int CrudUploadThrottleMs { get; set; }

    /// <summary>Defaults: 5s retry delay, 1s CRUD upload throttle.</summary>
    public static RequiredAdditionalConnectionOptions DEFAULT_ADDITIONAL_CONNECTION_OPTIONS = new()
    {
        RetryDelayMs = 5000,
        CrudUploadThrottleMs = 1000
    };
}
+
/// <summary>
/// Dependencies required to construct a streaming sync implementation.
/// </summary>
public class StreamingSyncImplementationOptions : AdditionalConnectionOptions
{
    /// <summary>Bucket storage adapter used to persist synced data.</summary>
    public IBucketStorageAdapter Adapter { get; init; } = null!;

    /// <summary>Callback that uploads local CRUD changes to the backend.</summary>
    public Func<Task> UploadCrud { get; init; } = null!;

    /// <summary>Remote used to communicate with the PowerSync service.</summary>
    public Remote Remote { get; init; } = null!;

    /// <summary>Optional logger; a no-op logger is used when omitted.</summary>
    public ILogger? Logger { get; init; }
}
+
/// <summary>
/// Base options for establishing a PowerSync connection.
/// </summary>
public class BaseConnectionOptions
{
    public BaseConnectionOptions(Dictionary<string, object>? parameters = null)
    {
        Params = parameters;
    }

    /// <summary>
    /// These parameters are passed to the sync rules and will be available under the `user_parameters` object.
    /// </summary>
    public Dictionary<string, object>? Params { get; set; }
}
+
/// <summary>
/// Connection options where <c>Params</c> is guaranteed to be non-null.
/// The `new` member intentionally shadows the nullable base property.
/// </summary>
public class RequiredPowerSyncConnectionOptions : BaseConnectionOptions
{
    public new Dictionary<string, object> Params { get; set; } = new Dictionary<string, object>();
}
+
/// <summary>
/// Event payload emitted to listeners of the streaming sync implementation.
/// Exactly one of the members is typically set per event.
/// </summary>
public class StreamingSyncImplementationEvent
{
    /// <summary>
    /// Set whenever a status update has been attempted to be made or refreshed.
    /// </summary>
    public SyncStatusOptions? StatusUpdated { get; set; }

    /// <summary>
    /// Set whenever the status' members have changed in value.
    /// </summary>
    public SyncStatus? StatusChanged { get; set; }
}
+
/// <summary>
/// Per-connection options: sync-rule parameters plus retry/throttle tuning.
/// </summary>
public class PowerSyncConnectionOptions : BaseConnectionOptions
{
    public PowerSyncConnectionOptions(
        Dictionary<string, object>? @params = null,
        int? retryDelayMs = null,
        int? crudUploadThrottleMs = null
    ) : base(@params)
    {
        RetryDelayMs = retryDelayMs;
        CrudUploadThrottleMs = crudUploadThrottleMs;
    }

    /// <summary>
    /// Delay for retrying sync streaming operations from the PowerSync backend after an error occurs.
    /// </summary>
    public int? RetryDelayMs { get; set; }

    /// <summary>
    /// Backend Connector CRUD operations are throttled to occur at most every `CrudUploadThrottleMs` milliseconds.
    /// </summary>
    public int? CrudUploadThrottleMs { get; set; }
}
+
+
+public class StreamingSyncImplementation : EventStream<StreamingSyncImplementationEvent>
+{
+    public static RequiredPowerSyncConnectionOptions DEFAULT_STREAM_CONNECTION_OPTIONS = new()
+    {
+        Params = []
+    };
+
+    public static readonly int DEFAULT_CRUD_UPLOAD_THROTTLE_MS = 1000;
+    public static readonly int DEFAULT_RETRY_DELAY_MS = 5000;
+
+    protected StreamingSyncImplementationOptions Options { get; }
+
+    protected CancellationTokenSource? CancellationTokenSource { get; set; }
+
+    private Task? streamingSyncTask;
+    public Action TriggerCrudUpload { get; }
+    private CancellationTokenSource? crudUpdateCts;
+    private readonly ILogger logger;
+
+    private readonly StreamingSyncLocks locks;
+
    /// <summary>
    /// Creates the streaming sync implementation with an initial disconnected,
    /// idle sync status.
    /// </summary>
    public StreamingSyncImplementation(StreamingSyncImplementationOptions options)
    {
        Options = options;
        SyncStatus = new SyncStatus(new SyncStatusOptions
        {
            Connected = false,
            Connecting = false,
            LastSyncedAt = null,
            DataFlow = new SyncDataFlowStatus
            {
                Uploading = false,
                Downloading = false
            }
        });


        locks = new StreamingSyncLocks();
        // Fall back to a no-op logger when none is supplied.
        logger = options.Logger ?? NullLogger.Instance;

        CancellationTokenSource = null;

        // TODO CL throttling
        // Fire-and-forget upload trigger: skipped while disconnected or while an
        // upload is already in flight.
        TriggerCrudUpload = () =>
        {
            if (!SyncStatus.Connected || SyncStatus.DataFlowStatus.Uploading)
            {
                return;
            }

            Task.Run(async () => await InternalUploadAllCrud());
        };
    }
+
    /// <summary>
    /// Indicates if the sync service is connected. Settable only by this class
    /// and subclasses.
    /// </summary>
    public bool IsConnected { get; protected set; }

    /// <summary>
    /// The timestamp of the last successful sync, or null if no sync has
    /// completed yet.
    /// </summary>
    public DateTime? LastSyncedAt { get; protected set; }

    /// <summary>
    /// The current synchronization status.
    /// </summary>
    public SyncStatus SyncStatus { get; protected set; }
+
+    public async Task Connect(PowerSyncConnectionOptions? options = null)
+    {
+        if (CancellationTokenSource != null)
+        {
+            await Disconnect();
+        }
+        CancellationTokenSource = new CancellationTokenSource();
+
+
+        streamingSyncTask = StreamingSync(CancellationTokenSource.Token, options);
+
+        var tcs = new TaskCompletionSource<bool>();
+        var cts = new CancellationTokenSource();
+
+        var _ = Task.Run(() =>
+        {
+            foreach (var status in Listen(cts.Token))
+            {
+                if (status.StatusUpdated != null)
+                {
+                    if (status.StatusUpdated.Connected != null)
+                    {
+                        if (status.StatusUpdated.Connected == false)
+                        {
+                            logger.LogWarning("Initial connect attempt did not successfully connect to server");
+                        }
+
+                        tcs.SetResult(true);
+                        cts.Cancel();
+                    }
+                }
+            }
+        });
+
+        await tcs.Task;
+    }
+
+    public async Task Disconnect()
+    {
+        if (CancellationTokenSource == null)
+        {
+            return;
+        }
+        // This might be called multiple times
+        if (!CancellationTokenSource.Token.IsCancellationRequested)
+        {
+            CancellationTokenSource.Cancel();
+        }
+
+        // Await any pending operations before completing the disconnect operation
+        try
+        {
+            if (streamingSyncTask != null)
+            {
+                await streamingSyncTask;
+            }
+        }
+        catch (Exception ex)
+        {
+            // The operation might have failed, all we care about is if it has completed
+            logger.LogWarning("{Message}", ex.Message);
+        }
+
+        streamingSyncTask = null;
+        CancellationTokenSource = null;
+
+        UpdateSyncStatus(new SyncStatusOptions { Connected = false, Connecting = false });
+    }
+
    /// <summary>
    /// Outer sync loop: starts CRUD-upload triggering from adapter events, then
    /// repeatedly runs streaming sync iterations — retrying after errors — until
    /// the signal is cancelled or an iteration reports an unrecoverable state.
    /// </summary>
    protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectionOptions? options)
    {
        if (signal == null)
        {
            CancellationTokenSource = new CancellationTokenSource();
            signal = CancellationTokenSource.Token;
        }

        // Trigger a CRUD upload whenever the bucket storage adapter reports an event.
        crudUpdateCts = new CancellationTokenSource();
        var _ = Task.Run(() =>
        {
            foreach (var _ in Options.Adapter.Listen(crudUpdateCts.Token))
            {
                TriggerCrudUpload();
            }
        });

        // Create a new cancellation token source for nested operations.
        // This is needed to close any previous connections.
        var nestedCts = new CancellationTokenSource();
        signal.Value.Register(() =>
        {
            nestedCts.Cancel();
            crudUpdateCts?.Cancel();
            crudUpdateCts = null;
            UpdateSyncStatus(new SyncStatusOptions
            {
                Connected = false,
                Connecting = false,
                DataFlow = new SyncDataFlowStatus { Downloading = false }
            });
        });

        /// This loop runs until an iteration reports no retry or the abort signal is cancelled.
        /// Aborting the nestedCts will:
        /// - Abort any pending fetch requests
        /// - Close any sync stream ReadableStreams (which will also close any established network requests)
        while (true)
        {
            UpdateSyncStatus(new SyncStatusOptions { Connecting = true });

            try
            {
                if (signal.Value.IsCancellationRequested)
                {
                    break;
                }
                var iterationResult = await StreamingSyncIteration(nestedCts.Token, options);
                if (!iterationResult.Retry)
                {

                    // A sync error occurred that we cannot recover from here.
                    // This loop must terminate.
                    // The nestedCts will close any open network requests and streams below.
                    break;
                }
                // Continue immediately
            }
            catch (Exception ex)
            {
                logger.LogError("Caught exception in streaming sync: {message}", ex.Message);

                // Either:
                //  - A network request failed with a failed connection or not OKAY response code.
                //  - There was a sync processing error.
                // This loop will retry.
                // The nested abort controller will cleanup any open network requests and streams.
                // The WebRemote should only abort pending fetch requests or close active Readable streams.
                //
                // On error, wait a little before retrying
                await DelayRetry();
            }
            finally
            {

                if (!signal.Value.IsCancellationRequested)
                {
                    // Closing sync stream network requests before retry.
                    nestedCts.Cancel();
                    nestedCts = new CancellationTokenSource();
                }

                UpdateSyncStatus(new SyncStatusOptions
                {
                    Connected = false,
                    Connecting = true // May be unnecessary
                });
            }
        }

        // Mark as disconnected if here
        UpdateSyncStatus(new SyncStatusOptions
        {
            Connected = false,
            Connecting = false
        });
    }
+
    /// <summary>
    /// Outcome of a single streaming sync iteration.
    /// </summary>
    protected record StreamingSyncIterationResult
    {
        // True: transient failure or stream closure — the outer loop should
        // reconnect. False: unrecoverable; the outer loop terminates.
        public bool Retry { get; init; }
    }
+
+    protected async Task<StreamingSyncIterationResult> StreamingSyncIteration(CancellationToken signal, PowerSyncConnectionOptions? options)
+    {
+        return await locks.ObtainLock(new LockOptions<StreamingSyncIterationResult>
+        {
+            Type = LockType.SYNC,
+            Token = signal,
+            Callback = async () =>
+            {
+                var resolvedOptions = new RequiredPowerSyncConnectionOptions
+                {
+                    Params = options?.Params ?? DEFAULT_STREAM_CONNECTION_OPTIONS.Params
+                };
+
+                logger.LogDebug("Streaming sync iteration started");
+                Options.Adapter.StartSession();
+                var bucketEntries = await Options.Adapter.GetBucketStates();
+                var initialBuckets = new Dictionary<string, string>();
+
+                foreach (var entry in bucketEntries)
+                {
+                    initialBuckets[entry.Bucket] = entry.OpId;
+                }
+
+                var req = initialBuckets
+                    .Select(kvp => new BucketRequest
+                    {
+                        Name = kvp.Key,
+                        After = kvp.Value
+                    })
+                    .ToList();
+
+                var targetCheckpoint = (Checkpoint?)null;
+                var validatedCheckpoint = (Checkpoint?)null;
+                var appliedCheckpoint = (Checkpoint?)null;
+
+                var bucketSet = new HashSet<string>(initialBuckets.Keys);
+
+                var clientId = await Options.Adapter.GetClientId();
+
+                logger.LogDebug("Requesting stream from server");
+
+                var syncOptions = new SyncStreamOptions
+                {
+                    Path = "/sync/stream",
+                    CancellationToken = signal,
+                    Data = new StreamingSyncRequest
+                    {
+                        Buckets = req,
+                        IncludeChecksum = true,
+                        RawData = true,
+                        Parameters = resolvedOptions.Params, // Replace with actual params
+                        ClientId = clientId
+                    }
+                };
+
+                var stream = Options.Remote.PostStream(syncOptions);
+                var first = true;
+                await foreach (var line in stream)
+                {
+                    if (first)
+                    {
+                        first = false;
+                        logger.LogDebug("Stream established. Processing events");
+                    }
+
+                    if (line == null)
+                    {
+                        logger.LogDebug("Stream has closed while waiting");
+                        // The stream has closed while waiting
+                        return new StreamingSyncIterationResult { Retry = true };
+                    }
+
+                    // A connection is active and messages are being received
+                    if (!SyncStatus.Connected)
+                    {
+                        // There is a connection now
+                        UpdateSyncStatus(new SyncStatusOptions
+                        {
+                            Connected = true
+                        });
+                        TriggerCrudUpload();
+                    }
+
+                    if (line is StreamingSyncCheckpoint syncCheckpoint)
+                    {
+                        logger.LogDebug("Sync checkpoint: {message}", syncCheckpoint);
+
+                        targetCheckpoint = syncCheckpoint.Checkpoint;
+                        var bucketsToDelete = new HashSet<string>(bucketSet);
+                        var newBuckets = new HashSet<string>();
+
+                        foreach (var checksum in syncCheckpoint.Checkpoint.Buckets)
+                        {
+                            newBuckets.Add(checksum.Bucket);
+                            bucketsToDelete.Remove(checksum.Bucket);
+                        }
+                        if (bucketsToDelete.Count > 0)
+                        {
+                            logger.LogDebug("Removing buckets: {message}", string.Join(", ", bucketsToDelete));
+                        }
+
+                        bucketSet = newBuckets;
+                        await Options.Adapter.RemoveBuckets([.. bucketsToDelete]);
+                        await Options.Adapter.SetTargetCheckpoint(targetCheckpoint);
+                    }
+                    else if (line is StreamingSyncCheckpointComplete checkpointComplete)
+                    {
+                        logger.LogDebug("Checkpoint complete: {message}", targetCheckpoint);
+
+                        var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!);
+
+                        if (!result.CheckpointValid)
+                        {
+                            // This means checksums failed. Start again with a new checkpoint.
+                            // TODO: better back-off
+                            await Task.Delay(50);
+                            return new StreamingSyncIterationResult { Retry = true };
+                        }
+                        else if (!result.Ready)
+                        {
+                            // Checksums valid, but need more data for a consistent checkpoint.
+                            // Continue waiting.
+                            // Landing here the whole time
+                        }
+                        else
+                        {
+                            appliedCheckpoint = targetCheckpoint;
+                            logger.LogDebug("Validated checkpoint: {message}", appliedCheckpoint);
+
+                            UpdateSyncStatus(new SyncStatusOptions
+                            {
+                                Connected = true,
+                                LastSyncedAt = DateTime.Now,
+                                DataFlow = new SyncDataFlowStatus { Downloading = false }
+                            });
+
+                        }
+
+                        validatedCheckpoint = targetCheckpoint;
+                    }
+                    else if (line is StreamingSyncCheckpointDiff checkpointDiff)
+                    {
+                        // TODO: It may be faster to just keep track of the diff, instead of the entire checkpoint
+                        if (targetCheckpoint == null)
+                        {
+                            throw new Exception("Checkpoint diff without previous checkpoint");
+                        }
+
+                        var diff = checkpointDiff.CheckpointDiff;
+                        var newBuckets = new Dictionary<string, BucketChecksum>();
+
+                        foreach (var checksum in targetCheckpoint.Buckets)
+                        {
+                            newBuckets[checksum.Bucket] = checksum;
+                        }
+
+                        foreach (var checksum in diff.UpdatedBuckets)
+                        {
+                            newBuckets[checksum.Bucket] = checksum;
+                        }
+
+                        foreach (var bucket in diff.RemovedBuckets)
+                        {
+                            newBuckets.Remove(bucket);
+                        }
+
+                        var newWriteCheckpoint = !string.IsNullOrEmpty(diff.WriteCheckpoint) ? diff.WriteCheckpoint : null;
+                        var newCheckpoint = new Checkpoint
+                        {
+                            LastOpId = diff.LastOpId,
+                            Buckets = [.. newBuckets.Values],
+                            WriteCheckpoint = newWriteCheckpoint
+                        };
+
+                        targetCheckpoint = newCheckpoint;
+
+                        bucketSet = [.. newBuckets.Keys];
+
+                        var bucketsToDelete = diff.RemovedBuckets.ToArray();
+                        if (bucketsToDelete.Length > 0)
+                        {
+                            logger.LogDebug("Remove buckets: {message}", string.Join(", ", bucketsToDelete));
+                        }
+
+                        // Perform async operations
+                        await Options.Adapter.RemoveBuckets(bucketsToDelete);
+                        await Options.Adapter.SetTargetCheckpoint(targetCheckpoint);
+                    }
+                    else if (line is StreamingSyncDataJSON dataJSON)
+                    {
+                        UpdateSyncStatus(new SyncStatusOptions
+                        {
+                            DataFlow = new SyncDataFlowStatus
+                            {
+                                Downloading = true
+                            }
+                        });
+                        await Options.Adapter.SaveSyncData(new SyncDataBatch([SyncDataBucket.FromRow(dataJSON.Data)]));
+                    }
+                    else if (line is StreamingSyncKeepalive keepalive)
+                    {
+                        var remainingSeconds = keepalive.TokenExpiresIn;
+                        if (remainingSeconds == 0)
+                        {
+                            // Connection would be closed automatically right after this
+                            logger.LogDebug("Token expiring; reconnect");
+
+                            // For a rare case where the backend connector does not update the token
+                            // (uses the same one), this should have some delay.
+                            //
+                            await DelayRetry();
+                            return new StreamingSyncIterationResult { Retry = true };
+                        }
+                        TriggerCrudUpload();
+                    }
+                    else
+                    {
+                        logger.LogDebug("Sync complete");
+
+                        if (targetCheckpoint == appliedCheckpoint)
+                        {
+                            UpdateSyncStatus(new SyncStatusOptions
+                            {
+                                Connected = true,
+                                LastSyncedAt = DateTime.Now
+                            });
+                        }
+                        else if (validatedCheckpoint == targetCheckpoint)
+                        {
+                            var result = await Options.Adapter.SyncLocalDatabase(targetCheckpoint!);
+                            if (!result.CheckpointValid)
+                            {
+                                // This means checksums failed. Start again with a new checkpoint.
+                                // TODO: better back-off
+                                await Task.Delay(50);
+                                return new StreamingSyncIterationResult { Retry = false };
+                            }
+                            else if (!result.Ready)
+                            {
+                                // Checksums valid, but need more data for a consistent checkpoint.
+                                // Continue waiting.
+                            }
+                            else
+                            {
+                                appliedCheckpoint = targetCheckpoint;
+                                UpdateSyncStatus(new SyncStatusOptions
+                                {
+                                    Connected = true,
+                                    LastSyncedAt = DateTime.Now,
+                                    DataFlow = new SyncDataFlowStatus
+                                    {
+                                        Downloading = false
+                                    }
+                                });
+                            }
+                        }
+                    }
+                }
+
+                logger.LogDebug("Stream input empty");
+                // Connection closed. Likely due to auth issue.
+                return new StreamingSyncIterationResult { Retry = true };
+            }
+        });
+
+    }
+
+    /// <summary>
+    /// Stops the CRUD-upload trigger and closes the underlying stream.
+    /// </summary>
+    public new void Close()
+    {
+        // Stop the scheduled CRUD-upload trigger before tearing down the stream.
+        crudUpdateCts?.Cancel();
+        base.Close();
+        // Cancel() alone does not release the CTS's resources (timers/wait handles);
+        // dispose it before dropping the reference.
+        crudUpdateCts?.Dispose();
+        crudUpdateCts = null;
+    }
+
+    /// <summary>Payload of the write-checkpoint endpoint response.</summary>
+    public record ResponseData(
+        [property: JsonProperty("write_checkpoint")] string WriteCheckpoint
+    );
+
+    /// <summary>Envelope returned by the write-checkpoint endpoint.</summary>
+    public record ApiResponse(
+        [property: JsonProperty("data")] ResponseData Data
+    );
+    /// <summary>
+    /// Requests the current write checkpoint for this client from the sync service.
+    /// </summary>
+    public async Task<string> GetWriteCheckpoint()
+    {
+        // The checkpoint is scoped per client, so the client id is part of the query.
+        var clientId = await Options.Adapter.GetClientId();
+        var path = $"/write-checkpoint2.json?client_id={clientId}";
+        var response = await Options.Remote.Get<ApiResponse>(path);
+
+        return response.Data.WriteCheckpoint;
+    }
+
+    /// <summary>
+    /// Drains the local CRUD upload queue: repeatedly hands the connector the next
+    /// queued item until the queue is empty, then records the write checkpoint.
+    /// Runs under the CRUD lock so only one upload loop is active at a time.
+    /// </summary>
+    protected async Task InternalUploadAllCrud()
+    {
+
+        await locks.ObtainLock(new LockOptions<Task>
+        {
+            Type = LockType.CRUD,
+            Callback = async () =>
+            {
+                // The last item we handed to the connector. If the same item is still at
+                // the head of the queue on the next iteration, the connector is not
+                // completing its uploads and we back off instead of spinning.
+                CrudEntry? checkedCrudItem = null;
+
+                while (true)
+                {
+                    UpdateSyncStatus(new SyncStatusOptions { DataFlow = new SyncDataFlowStatus { Uploading = true } });
+
+                    try
+                    {
+                        // This is the first item in the FIFO CRUD queue.
+                        var nextCrudItem = await Options.Adapter.NextCrudItem();
+                        if (nextCrudItem != null)
+                        {
+                            if (checkedCrudItem?.ClientId == nextCrudItem.ClientId)
+                            {
+                                logger.LogWarning(
+                                    "Potentially previously uploaded CRUD entries are still present in the upload queue. " +
+                                    "Make sure to handle uploads and complete CRUD transactions or batches by calling and awaiting their `.Complete()` method. " +
+                                    "The next upload iteration will be delayed."
+                                );
+                                throw new Exception("Delaying due to previously encountered CRUD item.");
+                            }
+
+                            checkedCrudItem = nextCrudItem;
+                            await Options.UploadCrud();
+                        }
+                        else
+                        {
+                            // Uploading is completed
+                            await Options.Adapter.UpdateLocalTarget(GetWriteCheckpoint);
+                            break;
+                        }
+                    }
+                    catch (Exception ex)
+                    {
+                        // Log immediately: previously this was logged only after the retry
+                        // delay, and skipped entirely when the loop exited on disconnect.
+                        logger.LogDebug("Caught exception when uploading. Upload will retry after a delay. Exception: {message}", ex.Message);
+
+                        checkedCrudItem = null;
+                        UpdateSyncStatus(new SyncStatusOptions { DataFlow = new SyncDataFlowStatus { Uploading = false } });
+
+                        await DelayRetry();
+
+                        if (!IsConnected)
+                        {
+                            // Exit loop if sync stream is no longer connected
+                            break;
+                        }
+                    }
+                    finally
+                    {
+                        UpdateSyncStatus(new SyncStatusOptions { DataFlow = new SyncDataFlowStatus { Uploading = false } });
+                    }
+                }
+
+                return Task.CompletedTask;
+            }
+        });
+    }
+
+    /// <summary>
+    /// Reports whether at least one full sync has completed, as recorded by the
+    /// bucket storage adapter.
+    /// </summary>
+    public async Task<bool> HasCompletedSync() => await Options.Adapter.HasCompletedSync();
+
+    /// <summary>
+    /// This implementation needs no asynchronous initialization; completes immediately.
+    /// </summary>
+    public Task WaitForReady() => Task.CompletedTask;
+
+    /// <summary>
+    /// Merges the given partial status into the current <see cref="SyncStatus"/>.
+    /// Emits StatusChanged only when the merged status actually differs, and
+    /// StatusUpdated on every call.
+    /// </summary>
+    protected void UpdateSyncStatus(SyncStatusOptions options)
+    {
+        var updatedStatus = new SyncStatus(new SyncStatusOptions
+        {
+            Connected = options.Connected ?? SyncStatus.Connected,
+            // Connecting is mutually exclusive with Connected.
+            Connecting = !options.Connected.GetValueOrDefault() && (options.Connecting ?? SyncStatus.Connecting),
+            LastSyncedAt = options.LastSyncedAt ?? SyncStatus.LastSyncedAt,
+            // Previously dropped during the merge: carry HasSynced forward too.
+            HasSynced = options.HasSynced ?? SyncStatus.HasSynced,
+            DataFlow = new SyncDataFlowStatus
+            {
+                Uploading = options.DataFlow?.Uploading ?? SyncStatus.DataFlowStatus.Uploading,
+                Downloading = options.DataFlow?.Downloading ?? SyncStatus.DataFlowStatus.Downloading
+            }
+        });
+
+        // Use the explicit value comparison. Calling Equals() here compared
+        // references (SyncStatus exposes IsEqual rather than relying on Equals),
+        // so this branch fired — and StatusChanged was emitted — on every update.
+        if (!SyncStatus.IsEqual(updatedStatus))
+        {
+            SyncStatus = updatedStatus;
+            logger.LogDebug("[Sync status updated]: {message}", updatedStatus.ToJSON());
+            // Only trigger this if there was a change
+            Emit(new StreamingSyncImplementationEvent { StatusChanged = updatedStatus });
+        }
+
+        // Trigger this for all updates
+        Emit(new StreamingSyncImplementationEvent { StatusUpdated = options });
+    }
+
+    /// <summary>
+    /// Waits for the configured retry delay, if any, before the caller retries.
+    /// </summary>
+    private async Task DelayRetry()
+    {
+        // No configured delay means retry immediately.
+        if (Options.RetryDelayMs is int delayMs)
+        {
+            await Task.Delay(delayMs);
+        }
+    }
+}
+
+/// <summary>Identifies which exclusive operation a lock guards.</summary>
+enum LockType
+{
+    // Serializes CRUD upload runs.
+    CRUD,
+    // Serializes streaming-sync iterations.
+    SYNC
+}
+
+/// <summary>Parameters for obtaining a lock and running work under it.</summary>
+class LockOptions<T>
+{
+    // Work to run while the lock is held.
+    public Func<Task<T>> Callback { get; set; } = null!;
+    // Which serialized operation this lock protects.
+    public LockType Type { get; set; }
+    // Optional token, checked after the lock is acquired; aborts the callback if cancelled.
+    public CancellationToken? Token { get; set; }
+}
+
+/// <summary>
+/// A simple asynchronous mutex: at most one callback runs at a time.
+/// </summary>
+class Lock
+{
+    private readonly SemaphoreSlim gate = new(1, 1);
+
+    /// <summary>Runs <paramref name="action"/> while holding the lock and returns its result.</summary>
+    public async Task<T> Acquire<T>(Func<Task<T>> action)
+    {
+        await gate.WaitAsync();
+        try
+        {
+            return await action();
+        }
+        finally
+        {
+            // Always release, even when the callback throws.
+            gate.Release();
+        }
+    }
+}
+
+/// <summary>
+/// Holds one <see cref="Lock"/> per <see cref="LockType"/> so CRUD uploads and
+/// sync iterations are each serialized independently of one another.
+/// </summary>
+class StreamingSyncLocks
+{
+    protected Dictionary<LockType, Lock> Locks { get; private set; } = null!;
+
+    public StreamingSyncLocks()
+    {
+        InitLocks();
+    }
+
+    // Creates a dedicated lock for every defined lock type.
+    private void InitLocks()
+    {
+        Locks = new Dictionary<LockType, Lock>();
+        foreach (LockType lockType in Enum.GetValues(typeof(LockType)))
+        {
+            Locks[lockType] = new Lock();
+        }
+    }
+
+    /// <summary>
+    /// Runs the callback while holding the lock for its type, honoring the
+    /// optional cancellation token once the lock has been acquired.
+    /// </summary>
+    public async Task<T> ObtainLock<T>(LockOptions<T> lockOptions)
+    {
+        if (!Locks.TryGetValue(lockOptions.Type, out var lockInstance))
+        {
+            throw new InvalidOperationException($"Lock type {lockOptions.Type} not found");
+        }
+
+        return await lockInstance.Acquire(async () =>
+        {
+            // Abort before running the callback if the caller cancelled while waiting.
+            if (lockOptions.Token?.IsCancellationRequested == true)
+            {
+                throw new OperationCanceledException("Aborted", lockOptions.Token.Value);
+            }
+
+            return await lockOptions.Callback();
+        });
+    }
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs
new file mode 100644
index 0000000..c46d4d1
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs
@@ -0,0 +1,149 @@
+namespace PowerSync.Common.Client.Sync.Stream;
+
+using PowerSync.Common.Client.Sync.Bucket;
+using PowerSync.Common.DB.Crud;
+using Newtonsoft.Json;
+
+/// <summary>Request to continue syncing from a previously issued checkpoint token.</summary>
+public class ContinueCheckpointRequest
+{
+    [JsonProperty("buckets")]
+    public List<BucketRequest> Buckets { get; set; } = new();
+
+    [JsonProperty("checkpoint_token")]
+    public string CheckpointToken { get; set; } = "";
+
+    [JsonProperty("limit")]
+    public int? Limit { get; set; }
+}
+
+/// <summary>Request for a new checkpoint, listing the buckets the client already has.</summary>
+public class SyncNewCheckpointRequest
+{
+    [JsonProperty("buckets")]
+    public List<BucketRequest>? Buckets { get; set; }
+
+    [JsonProperty("request_checkpoint")]
+    public RequestCheckpoint RequestCheckpoint { get; set; } = new();
+
+    [JsonProperty("limit")]
+    public int? Limit { get; set; }
+}
+
+/// <summary>Flags controlling what the requested checkpoint should include.</summary>
+public class RequestCheckpoint
+{
+    [JsonProperty("include_data")]
+    public bool IncludeData { get; set; }
+
+    [JsonProperty("include_checksum")]
+    public bool IncludeChecksum { get; set; }
+}
+
+/// <summary>A (possibly partial) sync response: bucket data, pagination flag, and checkpoint info.</summary>
+public class SyncResponse
+{
+    [JsonProperty("data")]
+    public List<SyncDataBucketJSON>? Data { get; set; }
+
+    // True when more data is available beyond this response.
+    [JsonProperty("has_more")]
+    public bool HasMore { get; set; }
+
+    [JsonProperty("checkpoint_token")]
+    public string? CheckpointToken { get; set; }
+
+    [JsonProperty("checkpoint")]
+    public Checkpoint? Checkpoint { get; set; }
+}
+
+/// <summary>Initial request body sent when opening the streaming-sync connection.</summary>
+public class StreamingSyncRequest
+{
+    // Buckets the client already has, so the service can send only deltas.
+    [JsonProperty("buckets")]
+    public List<BucketRequest>? Buckets { get; set; }
+
+    [JsonProperty("only")]
+    public List<string>? Only { get; set; } = [];
+
+    [JsonProperty("include_checksum")]
+    public bool IncludeChecksum { get; set; }
+
+    [JsonProperty("raw_data")]
+    public bool RawData { get; set; }
+
+    // Client-supplied sync parameters forwarded to the service.
+    [JsonProperty("parameters")]
+    public Dictionary<string, object>? Parameters { get; set; }
+
+    [JsonProperty("client_id")]
+    public string? ClientId { get; set; }
+}
+
+/// <summary>Identifies a bucket and the point to resume it from.</summary>
+public class BucketRequest
+{
+    [JsonProperty("name")]
+    public string Name { get; set; } = "";
+
+    // NOTE(review): presumably the op id to resume after — confirm against the sync protocol.
+    [JsonProperty("after")]
+    public string After { get; set; } = "";
+}
+
+/// <summary>Base type for a single line (JSON message) received on the streaming-sync connection.</summary>
+public abstract class StreamingSyncLine { }
+
+/// <summary>Sync line carrying a full checkpoint.</summary>
+public class StreamingSyncCheckpoint : StreamingSyncLine
+{
+    [JsonProperty("checkpoint")]
+    public Checkpoint Checkpoint { get; set; } = new();
+}
+
+/// <summary>Sync line carrying an incremental checkpoint diff.</summary>
+public class StreamingSyncCheckpointDiff : StreamingSyncLine
+{
+    [JsonProperty("checkpoint_diff")]
+    public CheckpointDiff CheckpointDiff { get; set; } = new();
+}
+
+/// <summary>Changes relative to the previous checkpoint: updated and removed buckets.</summary>
+public class CheckpointDiff
+{
+    [JsonProperty("last_op_id")]
+    public string LastOpId { get; set; } = "";
+
+    [JsonProperty("updated_buckets")]
+    public List<BucketChecksum> UpdatedBuckets { get; set; } = new();
+
+    [JsonProperty("removed_buckets")]
+    public List<string> RemovedBuckets { get; set; } = new();
+
+    [JsonProperty("write_checkpoint")]
+    public string WriteCheckpoint { get; set; } = "";
+}
+
+/// <summary>Sync line carrying data for a single bucket.</summary>
+public class StreamingSyncDataJSON : StreamingSyncLine
+{
+    [JsonProperty("data")]
+    public SyncDataBucketJSON Data { get; set; } = new();
+}
+
+/// <summary>Sync line signalling that all data up to the checkpoint has been sent.</summary>
+public class StreamingSyncCheckpointComplete : StreamingSyncLine
+{
+    [JsonProperty("checkpoint_complete")]
+    public CheckpointComplete CheckpointComplete { get; set; } = new();
+}
+
+/// <summary>Identifies the completed checkpoint by its last op id.</summary>
+public class CheckpointComplete
+{
+    [JsonProperty("last_op_id")]
+    public string LastOpId { get; set; } = "";
+}
+
+/// <summary>Keepalive line; also reports the remaining auth token lifetime.</summary>
+public class StreamingSyncKeepalive : StreamingSyncLine
+{
+    // NOTE(review): unit not visible here — presumably seconds; confirm against the protocol.
+    [JsonProperty("token_expires_in")]
+    public int? TokenExpiresIn { get; set; }
+}
+
+
+/// <summary>Upload request body: the list of queued CRUD entries.</summary>
+public class CrudRequest
+{
+    [JsonProperty("data")]
+    public List<CrudEntry> Data { get; set; } = new();
+}
+
+/// <summary>Response to a CRUD upload, optionally carrying a checkpoint.</summary>
+public class CrudResponse
+{
+    [JsonProperty("checkpoint")]
+    public string? Checkpoint { get; set; }
+}
diff --git a/PowerSync/PowerSync.Common/DB/Crud/CrudBatch.cs b/PowerSync/PowerSync.Common/DB/Crud/CrudBatch.cs
new file mode 100644
index 0000000..027f72d
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Crud/CrudBatch.cs
@@ -0,0 +1,16 @@
+namespace PowerSync.Common.DB.Crud;
+
+using System;
+using System.Threading.Tasks;
+
+/// <summary>
+/// A batch of client-side CRUD operations pending upload.
+/// </summary>
+public class CrudBatch(CrudEntry[] Crud, bool HaveMore, Func<string?, Task> CompleteCallback)
+{
+    // The CRUD entries in this batch, in FIFO order.
+    public CrudEntry[] Crud { get; private set; } = Crud;
+
+    // True if more entries remain in the queue beyond this batch.
+    public bool HaveMore { get; private set; } = HaveMore;
+
+    /// <summary>
+    /// Marks the batch as handled by invoking the completion callback.
+    /// </summary>
+    /// <param name="checkpoint">Optional checkpoint forwarded to the completion callback.</param>
+    public async Task Complete(string? checkpoint = null)
+    {
+        await CompleteCallback(checkpoint);
+    }
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
new file mode 100644
index 0000000..ce974d0
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
@@ -0,0 +1,113 @@
+namespace PowerSync.Common.DB.Crud;
+
+using System.Collections.Generic;
+using Newtonsoft.Json;
+
+/// <summary>The kind of row operation a CRUD entry represents.</summary>
+// Serialize members by name ("PUT"/"PATCH"/"DELETE"). The [JsonProperty]
+// attributes previously placed on the members are ignored on enum fields by
+// Json.NET, which would have emitted integer values instead of the names.
+// StringEnumConverter still accepts integers on read, so this is backward compatible.
+[JsonConverter(typeof(Newtonsoft.Json.Converters.StringEnumConverter))]
+public enum UpdateType
+{
+    PUT,
+
+    PATCH,
+
+    DELETE
+}
+
+/// <summary>Raw CRUD queue row as stored in the database: an id, a JSON payload, and an optional transaction id.</summary>
+public class CrudEntryJSON
+{
+    [JsonProperty("id")]
+    public string Id { get; set; } = null!;
+
+    // JSON-encoded CrudEntryDataJSON payload.
+    [JsonProperty("data")]
+    public string Data { get; set; } = null!;
+
+    [JsonProperty("tx_id")]
+    public long? TransactionId { get; set; }
+}
+
+/// <summary>Deserialized payload of a CRUD queue row: the operation, target table ("type"), row id, and column data.</summary>
+public class CrudEntryDataJSON
+{
+    [JsonProperty("data")]
+    public Dictionary<string, object> Data { get; set; } = new();
+
+    [JsonProperty("op")]
+    public UpdateType Op { get; set; }
+
+    // Table name (mapped to CrudEntry.Table by CrudEntry.FromRow).
+    [JsonProperty("type")]
+    public string Type { get; set; } = null!;
+
+    [JsonProperty("id")]
+    public string Id { get; set; } = null!;
+}
+
+/// <summary>Upload wire format for a single CRUD operation.</summary>
+public class CrudEntryOutputJSON
+{
+    [JsonProperty("op_id")]
+    public int OpId { get; set; }
+
+    [JsonProperty("op")]
+    public UpdateType Op { get; set; }
+
+    // Table name.
+    [JsonProperty("type")]
+    public string Type { get; set; } = null!;
+
+    [JsonProperty("id")]
+    public string Id { get; set; } = null!;
+
+    [JsonProperty("tx_id")]
+    public long? TransactionId { get; set; }
+
+    [JsonProperty("data")]
+    public Dictionary<string, object>? Data { get; set; }
+}
+
+/// <summary>
+/// A single client-side row operation (PUT/PATCH/DELETE) queued for upload.
+/// </summary>
+public class CrudEntry(int clientId, UpdateType op, string table, string id, long? transactionId = null, Dictionary<string, object>? opData = null)
+{
+    public int ClientId { get; private set; } = clientId;
+    public string Id { get; private set; } = id;
+    public UpdateType Op { get; private set; } = op;
+    public Dictionary<string, object>? OpData { get; private set; } = opData;
+    public string Table { get; private set; } = table;
+    public long? TransactionId { get; private set; } = transactionId;
+
+    /// <summary>Builds a <see cref="CrudEntry"/> from the raw queue row stored in the database.</summary>
+    public static CrudEntry FromRow(CrudEntryJSON dbRow)
+    {
+        var parsed = JsonConvert.DeserializeObject<CrudEntryDataJSON>(dbRow.Data)
+            ?? throw new JsonException("Invalid JSON format in CrudEntryJSON data.");
+
+        return new CrudEntry(int.Parse(dbRow.Id), parsed.Op, parsed.Type, parsed.Id, dbRow.TransactionId, parsed.Data);
+    }
+
+    /// <summary>Converts this entry to the JSON shape expected for upload.</summary>
+    public CrudEntryOutputJSON ToJSON() => new()
+    {
+        OpId = ClientId,
+        Op = Op,
+        Type = Table,
+        Id = Id,
+        TransactionId = TransactionId,
+        Data = OpData
+    };
+
+    // Two entries are equal exactly when their serialized forms match.
+    public override bool Equals(object? obj) =>
+        obj is CrudEntry other
+        && JsonConvert.SerializeObject(this) == JsonConvert.SerializeObject(other);
+
+    public override int GetHashCode() => JsonConvert.SerializeObject(this).GetHashCode();
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/DB/Crud/CrudTransaction.cs b/PowerSync/PowerSync.Common/DB/Crud/CrudTransaction.cs
new file mode 100644
index 0000000..d516f4b
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Crud/CrudTransaction.cs
@@ -0,0 +1,9 @@
+namespace PowerSync.Common.DB.Crud;
+
+using System;
+using System.Threading.Tasks;
+
+/// <summary>A CRUD batch whose entries belong to a single local transaction (HaveMore is always false).</summary>
+public class CrudTransaction(CrudEntry[] crud, Func<string?, Task> complete, long? transactionId = null) : CrudBatch(crud, false, complete)
+{
+    // The local transaction id, when the entries were written inside a transaction.
+    public long? TransactionId { get; private set; } = transactionId;
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs
new file mode 100644
index 0000000..a12fcf3
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs
@@ -0,0 +1,85 @@
+namespace PowerSync.Common.DB.Crud;
+
+using Newtonsoft.Json;
+
+/// <summary>Whether data is actively being downloaded and/or uploaded.</summary>
+public class SyncDataFlowStatus
+{
+    [JsonProperty("downloading")]
+    public bool Downloading { get; set; } = false;
+
+    [JsonProperty("uploading")]
+    public bool Uploading { get; set; } = false;
+}
+
+/// <summary>
+/// Partial sync status. Null fields mean "unknown", allowing partial updates to
+/// be merged into an existing status.
+/// </summary>
+public class SyncStatusOptions
+{
+    public SyncStatusOptions() { }
+
+    // Copy constructor.
+    public SyncStatusOptions(SyncStatusOptions options)
+    {
+        Connected = options.Connected;
+        Connecting = options.Connecting;
+        DataFlow = options.DataFlow;
+        LastSyncedAt = options.LastSyncedAt;
+        HasSynced = options.HasSynced;
+    }
+
+
+    [JsonProperty("connected")]
+    public bool? Connected { get; set; }
+
+    [JsonProperty("connecting")]
+    public bool? Connecting { get; set; }
+
+    [JsonProperty("dataFlow")]
+    public SyncDataFlowStatus? DataFlow { get; set; }
+
+    [JsonProperty("lastSyncedAt")]
+    public DateTime? LastSyncedAt { get; set; }
+
+    [JsonProperty("hasSynced")]
+    public bool? HasSynced { get; set; }
+}
+
+/// <summary>
+/// Immutable view over <see cref="SyncStatusOptions"/> with value-equality
+/// semantics based on the serialized options.
+/// </summary>
+public class SyncStatus(SyncStatusOptions options)
+{
+    public SyncStatusOptions Options { get; } = options ?? new SyncStatusOptions();
+
+    public bool Connected => Options.Connected ?? false;
+
+    public bool Connecting => Options.Connecting ?? false;
+
+    /// <summary>
+    /// Time that the last sync has fully completed, if any.
+    /// Currently, this is reset to null after a restart.
+    /// </summary>
+    public DateTime? LastSyncedAt => Options.LastSyncedAt;
+
+    /// <summary>
+    /// Indicates whether there has been at least one full sync.
+    /// Is null when unknown, for example when state is still being loaded from the database.
+    /// </summary>
+    public bool? HasSynced => Options.HasSynced;
+
+    /// <summary>
+    /// Upload/download status.
+    /// </summary>
+    public SyncDataFlowStatus DataFlowStatus => Options.DataFlow ?? new SyncDataFlowStatus();
+
+
+    public bool IsEqual(SyncStatus status)
+    {
+        return JsonConvert.SerializeObject(Options) == JsonConvert.SerializeObject(status.Options);
+    }
+
+    // Value equality. Without this override, Equals() fell back to reference
+    // equality, so callers comparing two status instances always saw "different".
+    public override bool Equals(object? obj) => obj is SyncStatus other && IsEqual(other);
+
+    public override int GetHashCode() => JsonConvert.SerializeObject(Options).GetHashCode();
+
+    public string GetMessage()
+    {
+        var dataFlow = DataFlowStatus;
+        return $"SyncStatus<connected: {Connected} connecting: {Connecting} lastSyncedAt: {LastSyncedAt} hasSynced: {HasSynced}. Downloading: {dataFlow.Downloading}. Uploading: {dataFlow.Uploading}>";
+    }
+
+    public string ToJSON()
+    {
+        return JsonConvert.SerializeObject(this);
+    }
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/DB/Crud/UploadQueueStatus.cs b/PowerSync/PowerSync.Common/DB/Crud/UploadQueueStatus.cs
new file mode 100644
index 0000000..7813617
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Crud/UploadQueueStatus.cs
@@ -0,0 +1,20 @@
+namespace PowerSync.Common.DB.Crud;
+
+/// <summary>
+/// Summary of the local upload queue: the number of pending operations and,
+/// optionally, their total size in bytes.
+/// </summary>
+public class UploadQueueStats(int count, long? size = null)
+{
+    // Number of queued CRUD operations.
+    public int Count { get; set; } = count;
+
+    // Total payload size in bytes, when known.
+    public long? Size { get; set; } = size;
+
+    public override string ToString() =>
+        Size is long bytes
+            ? $"UploadQueueStats<count: {Count} size: {bytes / 1024.0}kB>"
+            : $"UploadQueueStats<count: {Count}>";
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/DB/IDBAdapter.cs b/PowerSync/PowerSync.Common/DB/IDBAdapter.cs
new file mode 100644
index 0000000..b0c906b
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/IDBAdapter.cs
@@ -0,0 +1,167 @@
+namespace PowerSync.Common.DB;
+
+using System.Collections.Generic;
+using System.Threading.Tasks;
+
+using PowerSync.Common.Utils;
+
+/// <summary>Result of a write (non-query) statement.</summary>
+public class NonQueryResult
+{
+    // Represents the auto-generated row id if applicable.
+    public long? InsertId { get; set; }
+
+    // Number of affected rows.
+    public int RowsAffected { get; set; }
+}
+
+/// <summary>Result of a read query.</summary>
+public class QueryResult
+{
+    /// <summary>The rows returned by a query, each as a column-name/value map.</summary>
+    public class QueryRows
+    {
+        // Raw array with all dataset.
+        public List<Dictionary<string, object>> Array { get; set; } = [];
+
+        // The length of the dataset.
+        public int Length => Array.Count;
+    }
+    public QueryRows Rows { get; set; } = new QueryRows();
+}
+
+/// <summary>Read-only query helpers.</summary>
+public interface IDBGetUtils
+{
+    /// <summary>Execute a read-only query and return results.</summary>
+    Task<T[]> GetAll<T>(string sql, params object[]? parameters);
+
+    /// <summary>Execute a read-only query and return the first result, or null if the ResultSet is empty.</summary>
+    Task<T?> GetOptional<T>(string sql, params object[]? parameters);
+
+    /// <summary>Execute a read-only query and return the first result, error if the ResultSet is empty.</summary>
+    Task<T> Get<T>(string sql, params object[]? parameters);
+}
+
+/// <summary>Context available while holding a database lock; adds write capability to the read helpers.</summary>
+public interface ILockContext : IDBGetUtils
+{
+    /// <summary>Execute a single write statement.</summary>
+    Task<NonQueryResult> Execute(string query, object[]? parameters = null);
+}
+
+/// <summary>A lock context with explicit transaction control.</summary>
+public interface ITransaction : ILockContext
+{
+    /// <summary>Commit multiple changes to the local DB using the Transaction context.</summary>
+    Task Commit();
+
+    /// <summary>Roll back multiple attempted changes using the Transaction context.</summary>
+    Task Rollback();
+}
+
+/// <summary>
+/// SQLite row-change operation codes; the numeric values match the constants
+/// SQLite passes to its update hook (SQLITE_INSERT=18, SQLITE_DELETE=9, SQLITE_UPDATE=23).
+/// </summary>
+public enum RowUpdateType
+{
+    SQLITE_INSERT = 18,
+    SQLITE_DELETE = 9,
+    SQLITE_UPDATE = 23
+}
+
+/// <summary>A single row change: the operation type and the affected rowid.</summary>
+public class TableUpdateOperation(RowUpdateType OpType, long RowId)
+{
+    public RowUpdateType OpType { get; set; } = OpType;
+    public long RowId { get; set; } = RowId;
+}
+
+/// <summary>Marker interface for table-update notifications (single or batched).</summary>
+public interface INotification
+{
+}
+
+/// <summary>A single row change on a named table.</summary>
+public class UpdateNotification(string table, RowUpdateType OpType, long RowId) : TableUpdateOperation(OpType, RowId), INotification
+{
+    public string Table { get; set; } = table;
+}
+
+/// <summary>A set of row changes, with the affected tables and per-table groupings.</summary>
+public class BatchedUpdateNotification : INotification
+{
+    public UpdateNotification[] RawUpdates { get; set; } = [];
+    public string[] Tables { get; set; } = [];
+    public Dictionary<string, TableUpdateOperation[]> GroupedUpdates { get; set; } = [];
+}
+
+/// <summary>Event payload emitted by an adapter when tables change.</summary>
+public class DBAdapterEvent
+{
+    public INotification? TablesUpdated { get; set; }
+}
+
+/// <summary>Options for acquiring a database lock.</summary>
+public class DBLockOptions
+{
+    // Optional timeout in milliseconds.
+    public int? TimeoutMs { get; set; }
+}
+
+/// <summary>
+/// Helpers for working with database update notifications.
+/// </summary>
+public class DBAdapterUtils
+{
+    /// <summary>Returns the names of the tables affected by <paramref name="update"/>.</summary>
+    public static string[] ExtractTableUpdates(INotification update)
+    {
+        if (update is BatchedUpdateNotification batched)
+        {
+            return batched.Tables;
+        }
+
+        if (update is UpdateNotification single)
+        {
+            return [single.Table];
+        }
+
+        throw new ArgumentException("Invalid update type", nameof(update));
+    }
+}
+
+/// <summary>
+/// A database adapter: low-level query execution plus read/write locking,
+/// transactions, and table-change events.
+/// </summary>
+public interface IDBAdapter : IEventStream<DBAdapterEvent>, ILockContext
+{
+    /// <summary>
+    /// Closes the adapter. 
+    /// </summary>
+    new void Close();
+
+    /// <summary>
+    /// Execute a batch of write statements.
+    /// </summary>
+    Task<QueryResult> ExecuteBatch(string query, object[][]? parameters = null);
+
+    /// <summary>
+    /// The name of the adapter.
+    /// </summary>
+    string Name { get; }
+
+    /// <summary>
+    /// Executes a read lock with the given function.
+    /// </summary>
+    Task<T> ReadLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null);
+
+    /// <summary>
+    /// Executes a read transaction with the given function.
+    /// </summary>
+    Task<T> ReadTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null);
+
+    /// <summary>
+    /// Executes a write lock with the given function.
+    /// </summary>
+    Task WriteLock(Func<ILockContext, Task> fn, DBLockOptions? options = null);
+    /// <summary>
+    /// Executes a write lock with the given function.
+    /// </summary>
+    ///
+    /// <remarks>
+    /// This is an overload of <see cref="WriteLock"/> that allows the function to return a result.
+    /// </remarks>
+    Task<T> WriteLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null);
+
+    /// <summary>
+    /// Executes a write transaction with the given function. 
+    /// </summary>
+    Task WriteTransaction(Func<ITransaction, Task> fn, DBLockOptions? options = null);
+    /// <summary>
+    /// Executes a write transaction with the given function. 
+    /// </summary>
+    /// 
+    /// <remarks>
+    /// This is an overload of <see cref="WriteTransaction"/> that allows the function to return a result.
+    /// </remarks>
+    Task<T> WriteTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null);
+
+    /// <summary>
+    /// This method refreshes the schema information across all connections. This is for advanced use cases, and should generally not be needed.
+    /// </summary>
+    Task RefreshSchema();
+}
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Column.cs b/PowerSync/PowerSync.Common/DB/Schema/Column.cs
new file mode 100644
index 0000000..edc80f3
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Schema/Column.cs
@@ -0,0 +1,34 @@
+namespace PowerSync.Common.DB.Schema;
+
+using Newtonsoft.Json;
+
+/// <summary>Storage types supported for synced columns.</summary>
+public enum ColumnType
+{
+    TEXT,
+    INTEGER,
+    REAL
+}
+
+/// <summary>Name/type pair used to construct a <see cref="Column"/>; a null type defaults to TEXT.</summary>
+public class ColumnOptions(string Name, ColumnType? Type)
+{
+    public string Name { get; set; } = Name;
+    public ColumnType? Type { get; set; } = Type;
+}
+
+/// <summary>A typed column in a synced table.</summary>
+public class Column(ColumnOptions options)
+{
+    // NOTE(review): presumably tracks SQLite's default 2000-column limit (minus one
+    // for the id column) — confirm where this is enforced.
+    public const int MAX_AMOUNT_OF_COLUMNS = 1999;
+
+    public string Name { get; set; } = options.Name;
+
+    // Unspecified types default to TEXT.
+    public ColumnType Type { get; set; } = options.Type ?? ColumnType.TEXT;
+
+    /// <summary>Serializes the column as {"name": ..., "type": ...}.</summary>
+    public string ToJSON()
+    {
+        return JsonConvert.SerializeObject(new
+        {
+            name = Name,
+            type = Type.ToString()
+        });
+    }
+}
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Index.cs b/PowerSync/PowerSync.Common/DB/Schema/Index.cs
new file mode 100644
index 0000000..257cbe6
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Schema/Index.cs
@@ -0,0 +1,25 @@
+namespace PowerSync.Common.DB.Schema;
+
+using Newtonsoft.Json;
+
+/// <summary>Name and column list used to construct an <see cref="Index"/>.</summary>
+public class IndexOptions(string name, List<IndexedColumn>? columns = null)
+{
+    public string Name { get; set; } = name;
+    public List<IndexedColumn>? Columns { get; set; } = columns ?? new List<IndexedColumn>();
+}
+
+/// <summary>A named index over one or more table columns.</summary>
+public class Index(IndexOptions options)
+{
+    public string Name { get; set; } = options.Name;
+
+    // NOTE(review): reads the captured options on every access, whereas Name is a
+    // snapshot — later mutations of options.Columns are reflected here.
+    public List<IndexedColumn> Columns => options.Columns ?? [];
+
+    /// <summary>Serializes this index, resolving column types against <paramref name="table"/>.</summary>
+    public string ToJSON(Table table)
+    {
+        return JsonConvert.SerializeObject(new
+        {
+            name = Name,
+            columns = Columns.Select(column => column.ToJSON(table)).ToList()
+        });
+    }
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs b/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
new file mode 100644
index 0000000..1a42038
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
@@ -0,0 +1,31 @@
+namespace PowerSync.Common.DB.Schema;
+
+using Newtonsoft.Json;
+
+/// <summary>Column name plus sort direction for an index definition.</summary>
+public class IndexColumnOptions(string Name, bool Ascending = true)
+{
+    public string Name { get; set; } = Name;
+    public bool Ascending { get; set; } = Ascending;
+}
+
+/// <summary>A column participating in an index, with its sort direction.</summary>
+public class IndexedColumn(IndexColumnOptions options)
+{
+    protected string Name { get; set; } = options.Name;
+
+    protected bool Ascending { get; set; } = options.Ascending;
+
+
+    /// <summary>
+    /// Returns a plain object describing this indexed column, resolving the column
+    /// type from <paramref name="table"/>.
+    /// </summary>
+    /// <remarks>
+    /// Previously this returned an already-serialized JSON string (despite the
+    /// <c>object</c> return type), so callers that embedded the result in a larger
+    /// object and serialized it produced double-encoded JSON. Returning the object
+    /// lets the caller's own serialization emit properly nested JSON.
+    /// </remarks>
+    public object ToJSON(Table table)
+    {
+        // Unknown columns fall back to default(ColumnType), i.e. TEXT.
+        var colType = table.OriginalColumns.TryGetValue(Name, out var value) ? value : default;
+
+        return new
+        {
+            name = Name,
+            ascending = Ascending,
+            type = colType.ToString()
+        };
+    }
+}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Schema.cs b/PowerSync/PowerSync.Common/DB/Schema/Schema.cs
new file mode 100644
index 0000000..2fc5e0e
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Schema/Schema.cs
@@ -0,0 +1,27 @@
+namespace PowerSync.Common.DB.Schema;
+
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+
+/// <summary>The client-side schema: a set of named tables.</summary>
+public class Schema(Dictionary<string, Table> tables)
+{
+    private readonly Dictionary<string, Table> Tables = tables;
+
+    /// <summary>
+    /// Serializes the schema to JSON, injecting each table's dictionary key as its
+    /// "name" property (placed first in the object).
+    /// </summary>
+    public string ToJSON()
+    {
+        var jsonObject = new
+        {
+            tables = Tables.Select(kv =>
+            {
+                // Merge the table's own JSON into an object that starts with "name",
+                // so the name appears before the table's other properties.
+                var json = JObject.Parse(kv.Value.ToJSON(kv.Key));
+                var orderedJson = new JObject { ["name"] = kv.Key };
+                orderedJson.Merge(json, new JsonMergeSettings { MergeArrayHandling = MergeArrayHandling.Concat });
+                return orderedJson;
+            }).ToList()
+        };
+
+
+        return JsonConvert.SerializeObject(jsonObject);
+    }
+}
+
+
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Table.cs b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
new file mode 100644
index 0000000..65359b1
--- /dev/null
+++ b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
@@ -0,0 +1,67 @@
+namespace PowerSync.Common.DB.Schema;
+
+using Newtonsoft.Json;
+
+// TODO CL Need to port this to C#
+// export const InvalidSQLCharacters = /["'%,.#\s[\]]/;
+
+
+/// <summary>Behavioral options for a synced table.</summary>
+public class TableOptions(
+    Dictionary<string, List<string>>? indexes = null,
+    bool? localOnly = null,
+    bool? insertOnly = null,
+    string? viewName = null)
+{
+    // Index name -> list of column names; a leading "-" on a name marks descending order.
+    public Dictionary<string, List<string>> Indexes { get; set; } = indexes ?? [];
+
+    // Local-only tables are not synced.
+    public bool LocalOnly { get; set; } = localOnly ?? false;
+
+    public bool InsertOnly { get; set; } = insertOnly ?? false;
+
+    // Overrides the generated view name; defaults to the table name when null.
+    public string? ViewName { get; set; } = viewName;
+}
+
+/// <summary>A synced table definition: typed columns plus optional indexes.</summary>
+public class Table
+{
+    protected TableOptions Options { get; set; }
+
+    public Dictionary<string, ColumnType> OriginalColumns;
+
+    private readonly List<Column> ConvertedColumns;
+    private readonly List<Index> ConvertedIndexes;
+
+    public Table(Dictionary<string, ColumnType> columns, TableOptions? options = null)
+    {
+        // Assign Options first: the index conversion below reads it. The previous
+        // code read `Options?.Indexes` before Options was assigned, so the index
+        // list was always empty and declared indexes were silently dropped.
+        Options = options ?? new TableOptions();
+        OriginalColumns = columns;
+
+        ConvertedColumns = [.. columns.Select(kv => new Column(new ColumnOptions(kv.Key, kv.Value)))];
+
+        // A leading "-" on an indexed column name marks descending order.
+        ConvertedIndexes = [.. Options.Indexes
+            .Select(kv =>
+                new Index(new IndexOptions(
+                    kv.Key,
+                    [.. kv.Value.Select(name =>
+                        new IndexedColumn(new IndexColumnOptions(
+                            name.Replace("-", ""), !name.StartsWith("-")))
+                    )]
+                ))
+            )];
+    }
+
+    /// <summary>Serializes the table definition; the key name is supplied by the schema.</summary>
+    public string ToJSON(string Name = "")
+    {
+        var jsonObject = new
+        {
+            view_name = Options.ViewName ?? Name,
+            local_only = Options.LocalOnly,
+            insert_only = Options.InsertOnly,
+            columns = ConvertedColumns.Select(c => JsonConvert.DeserializeObject<object>(c.ToJSON())).ToList(),
+            indexes = ConvertedIndexes.Select(e => JsonConvert.DeserializeObject<object>(e.ToJSON(this))).ToList()
+        };
+
+        return JsonConvert.SerializeObject(jsonObject);
+    }
+}
+
+
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
new file mode 100644
index 0000000..ff87267
--- /dev/null
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
@@ -0,0 +1,325 @@
+namespace PowerSync.Common.MDSQLite;
+
+using System;
+using System.Threading.Tasks;
+
+using Microsoft.Data.Sqlite;
+
+using Nito.AsyncEx;
+
+using PowerSync.Common.DB;
+using PowerSync.Common.Utils;
+
// Options for constructing an MDSQLiteAdapter.
public class MDSQLiteAdapterOptions()
{
    // Database filename/path, used to build the SqliteConnection connection string.
    public string Name { get; set; } = null!;

    // Optional PRAGMA tuning; unset values fall back to the library defaults.
    public MDSQLiteOptions? SqliteOptions;

}
+
/// <summary>
/// IDBAdapter built on Microsoft.Data.Sqlite. Maintains one write connection and
/// one read-only connection to the same database file, serialises access with
/// async mutexes, and re-emits table-update notifications from the write side.
/// </summary>
public class MDSQLiteAdapter : EventStream<DBAdapterEvent>, IDBAdapter
{
    public string Name => options.Name;

    public MDSQLiteConnection? writeConnection;
    public MDSQLiteConnection? readConnection;

    // Completes once both connections are open and all PRAGMAs have been applied.
    private readonly Task initialized;

    protected MDSQLiteAdapterOptions options;

    protected RequiredMDSQLiteOptions resolvedMDSQLiteOptions;

    private CancellationTokenSource? tablesUpdatedCts;

    // NOTE(review): these mutexes are static, so they serialise access across ALL
    // adapter instances in the process, not per-database - confirm intended.
    private static readonly AsyncLock writeMutex = new();
    private static readonly AsyncLock readMutex = new();

    public MDSQLiteAdapter(MDSQLiteAdapterOptions options)
    {
        this.options = options;
        resolvedMDSQLiteOptions = resolveMDSQLiteOptions(options.SqliteOptions);
        initialized = Init();
    }

    /// <summary>Fills any unset SQLite options with the library defaults.</summary>
    private RequiredMDSQLiteOptions resolveMDSQLiteOptions(MDSQLiteOptions? options)
    {
        var defaults = RequiredMDSQLiteOptions.DEFAULT_SQLITE_OPTIONS;
        return new RequiredMDSQLiteOptions
        {
            JournalMode = options?.JournalMode ?? defaults.JournalMode,
            Synchronous = options?.Synchronous ?? defaults.Synchronous,
            JournalSizeLimit = options?.JournalSizeLimit ?? defaults.JournalSizeLimit,
            CacheSizeKb = options?.CacheSizeKb ?? defaults.CacheSizeKb,
            TemporaryStorage = options?.TemporaryStorage ?? defaults.TemporaryStorage,
            LockTimeoutMs = options?.LockTimeoutMs ?? defaults.LockTimeoutMs,
            EncryptionKey = options?.EncryptionKey ?? defaults.EncryptionKey,
            Extensions = options?.Extensions ?? defaults.Extensions
        };
    }

    private async Task Init()
    {
        writeConnection = await OpenConnection(options.Name);
        readConnection = await OpenConnection(options.Name);

        string[] baseStatements =
        [
            $"PRAGMA busy_timeout = {resolvedMDSQLiteOptions.LockTimeoutMs}",
            // A negative cache_size sets the size in KiB instead of pages.
            $"PRAGMA cache_size = -{resolvedMDSQLiteOptions.CacheSizeKb}",
            $"PRAGMA temp_store = {resolvedMDSQLiteOptions.TemporaryStorage}"
        ];

        string[] writeConnectionStatements =
        [
            .. baseStatements,
            $"PRAGMA journal_mode = {resolvedMDSQLiteOptions.JournalMode}",
            $"PRAGMA journal_size_limit = {resolvedMDSQLiteOptions.JournalSizeLimit}",
            $"PRAGMA synchronous = {resolvedMDSQLiteOptions.Synchronous}",
        ];

        string[] readConnectionStatements =
        [
            .. baseStatements,
            "PRAGMA query_only = true",
        ];

        foreach (var statement in writeConnectionStatements)
        {
            // Another connection may briefly hold the database lock during startup,
            // so retry each statement a bounded number of times. (The original loop
            // body set `tries = 30` unconditionally, so it never actually retried.)
            for (int tries = 0; ; tries++)
            {
                try
                {
                    await writeConnection!.Execute(statement);
                    break;
                }
                catch (SqliteException) when (tries < 29)
                {
                    // Busy/locked - retry; rethrows once the attempt budget is spent.
                }
            }
        }

        foreach (var statement in readConnectionStatements)
        {
            await readConnection!.Execute(statement);
        }

        tablesUpdatedCts = new CancellationTokenSource();

        // Forward table-update notifications from the write connection to our own
        // subscribers until Close() cancels the token.
        var _ = Task.Run(() =>
        {
            foreach (var notification in writeConnection!.Listen(tablesUpdatedCts.Token))
            {
                if (notification.TablesUpdated != null)
                {
                    Emit(notification);
                }
            }
        });
    }

    /// <summary>Opens a connection, loads the PowerSync extension and initialises it.</summary>
    protected async Task<MDSQLiteConnection> OpenConnection(string dbFilename)
    {
        var db = OpenDatabase(dbFilename);
        LoadExtension(db);

        var connection = new MDSQLiteConnection(new MDSQLiteConnectionOptions(db));
        await connection.Execute("SELECT powersync_init()");

        return connection;
    }

    private static SqliteConnection OpenDatabase(string dbFilename)
    {
        var connection = new SqliteConnection($"Data Source={dbFilename}");
        connection.Open();
        return connection;
    }

    /// <summary>Loads the platform-specific PowerSync core extension into the connection.</summary>
    private void LoadExtension(SqliteConnection db)
    {
        string extensionPath = PowerSyncPathResolver.GetNativeLibraryPath(AppContext.BaseDirectory);
        db.EnableExtensions(true);
        db.LoadExtension(extensionPath, "sqlite3_powersync_init");
    }

    public new void Close()
    {
        tablesUpdatedCts?.Cancel();
        base.Close();
        writeConnection?.Close();
        // Fix: the read connection was previously leaked on Close.
        readConnection?.Close();
    }

    public async Task<NonQueryResult> Execute(string query, object[]? parameters = null)
    {
        return await WriteLock((ctx) => ctx.Execute(query, parameters));
    }

    public Task<QueryResult> ExecuteBatch(string query, object[][]? parameters = null)
    {
        // Not implemented yet, see:
        // https://learn.microsoft.com/en-gb/dotnet/standard/data/sqlite/batching
        throw new NotImplementedException();
    }

    public async Task<T> Get<T>(string sql, params object[]? parameters)
    {
        return await ReadLock((ctx) => ctx.Get<T>(sql, parameters));
    }

    public async Task<T[]> GetAll<T>(string sql, params object[]? parameters)
    {
        return await ReadLock((ctx) => ctx.GetAll<T>(sql, parameters));
    }

    public async Task<T?> GetOptional<T>(string sql, params object[]? parameters)
    {
        return await ReadLock((ctx) => ctx.GetOptional<T>(sql, parameters));
    }

    public async Task<T> ReadTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null)
    {
        return await ReadLock((ctx) => InternalTransaction(new MDSQLiteTransaction(readConnection!), fn));
    }

    /// <summary>Runs <paramref name="fn"/> against the read connection under the read mutex.</summary>
    public async Task<T> ReadLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null)
    {
        await initialized;

        T result;
        using (await readMutex.LockAsync())
        {
            result = await fn(readConnection!);
        }

        return result;
    }

    /// <summary>
    /// Runs <paramref name="fn"/> against the write connection under the write
    /// mutex, then flushes buffered table-update notifications to subscribers.
    /// </summary>
    public async Task WriteLock(Func<ILockContext, Task> fn, DBLockOptions? options = null)
    {
        await initialized;

        using (await writeMutex.LockAsync())
        {
            await fn(writeConnection!);
        }

        writeConnection!.FlushUpdates();
    }

    public async Task<T> WriteLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null)
    {
        await initialized;

        T result;
        using (await writeMutex.LockAsync())
        {
            result = await fn(writeConnection!);
        }

        writeConnection!.FlushUpdates();

        return result;
    }

    public async Task WriteTransaction(Func<ITransaction, Task> fn, DBLockOptions? options = null)
    {
        await WriteLock(ctx => InternalTransaction(new MDSQLiteTransaction(writeConnection!), fn));
    }

    public async Task<T> WriteTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null)
    {
        return await WriteLock((ctx) => InternalTransaction(new MDSQLiteTransaction(writeConnection!), fn));
    }

    protected static async Task InternalTransaction(
        MDSQLiteTransaction ctx,
        Func<ITransaction, Task> fn)
    {
        await RunTransaction(ctx, () => fn(ctx));
    }

    protected static async Task<T> InternalTransaction<T>(
        MDSQLiteTransaction ctx,
        Func<ITransaction, Task<T>> fn)
    {
        T result = default!;
        await RunTransaction(ctx, async () =>
        {
            result = await fn(ctx);
        });

        return result;
    }

    /// <summary>BEGIN, run the action, COMMIT; roll back (best-effort) on failure.</summary>
    private static async Task RunTransaction(
        MDSQLiteTransaction ctx,
        Func<Task> action)
    {
        try
        {
            await ctx.Begin();
            await action();
            await ctx.Commit();
        }
        catch (Exception)
        {
            // In rare cases the rollback itself may fail; safe to ignore because
            // the failed transaction was never committed.
            try { await ctx.Rollback(); }
            catch
            {
                // Ignore rollback errors
            }
            throw;
        }
    }

    public async Task RefreshSchema()
    {
        await initialized;
        await writeConnection!.RefreshSchema();
        await readConnection!.RefreshSchema();
    }
}
+
/// <summary>
/// A transaction bound to a single MDSQLiteConnection. Once committed or rolled
/// back it becomes inert: further Begin/Commit/Rollback calls are no-ops.
/// </summary>
public class MDSQLiteTransaction(MDSQLiteConnection connection) : ITransaction
{
    private readonly MDSQLiteConnection connection = connection;

    // True once COMMIT or ROLLBACK has run for this transaction.
    private bool finalized = false;

    public async Task Begin()
    {
        if (!finalized)
        {
            await connection.Execute("BEGIN");
        }
    }

    public Task Commit() => FinishWith("COMMIT");

    public Task Rollback() => FinishWith("ROLLBACK");

    // Shared terminal step: mark the transaction finished, then run the statement.
    // The flag is set first so a failed COMMIT/ROLLBACK still ends the transaction.
    private async Task FinishWith(string statement)
    {
        if (finalized)
        {
            return;
        }

        finalized = true;
        await connection.Execute(statement);
    }

    public Task<NonQueryResult> Execute(string query, object[]? parameters = null) =>
        connection.Execute(query, parameters);

    public Task<T> Get<T>(string sql, params object[]? parameters) =>
        connection.Get<T>(sql, parameters);

    public Task<T[]> GetAll<T>(string sql, params object[]? parameters) =>
        connection.GetAll<T>(sql, parameters);

    public Task<T?> GetOptional<T>(string sql, params object[]? parameters) =>
        connection.GetOptional<T>(sql, parameters);
}
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
new file mode 100644
index 0000000..7d41507
--- /dev/null
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
@@ -0,0 +1,222 @@
+namespace PowerSync.Common.MDSQLite;
+
+using System.Threading.Tasks;
+
+using Microsoft.Data.Sqlite;
+
+using Newtonsoft.Json;
+
+using PowerSync.Common.DB;
+using PowerSync.Common.Utils;
+
+using SQLitePCL;
+
// Wraps the already-opened SqliteConnection that an MDSQLiteConnection operates on.
public class MDSQLiteConnectionOptions(SqliteConnection database)
{
    public SqliteConnection Database { get; set; } = database;
}
+
/// <summary>
/// A single Microsoft.Data.Sqlite connection implementing ILockContext. Registers
/// native SQLite update/rollback hooks so row changes are buffered and can later
/// be emitted as one batched table-update notification via FlushUpdates().
/// </summary>
public class MDSQLiteConnection : EventStream<DBAdapterEvent>, ILockContext
{

    public SqliteConnection Db;

    // Row changes recorded by UpdateHook since the last FlushUpdates()/rollback.
    private List<UpdateNotification> updateBuffer;

    public MDSQLiteConnection(MDSQLiteConnectionOptions options)
    {
        Db = options.Database;
        updateBuffer = [];

        // Native hooks: clear buffered changes on rollback, record them on change.
        raw.sqlite3_rollback_hook(Db.Handle, RollbackHook, IntPtr.Zero);
        raw.sqlite3_update_hook(Db.Handle, UpdateHook, IntPtr.Zero);
    }

    // Called by SQLite when a transaction rolls back: none of the buffered
    // changes were committed, so drop them.
    private void RollbackHook(object user_data)
    {
        updateBuffer.Clear();
    }

    // Called by SQLite for each INSERT/DELETE/UPDATE on this connection. The
    // magic numbers are SQLite's native action codes, matching the enum names
    // below (SQLITE_INSERT = 18, SQLITE_DELETE = 9, SQLITE_UPDATE = 23).
    private void UpdateHook(object user_data, int type, utf8z database, utf8z table, long rowId)
    {
        var opType = type switch
        {
            18 => RowUpdateType.SQLITE_INSERT,
            9 => RowUpdateType.SQLITE_DELETE,
            23 => RowUpdateType.SQLITE_UPDATE,
            _ => throw new InvalidOperationException($"Unknown update type: {type}"),
        };
        updateBuffer.Add(new UpdateNotification(table.utf8_to_string(), opType, rowId));
    }

    /// <summary>
    /// Groups all buffered row changes by table and emits them as one batched
    /// notification. No-op when nothing changed since the last flush.
    /// </summary>
    public void FlushUpdates()
    {
        if (updateBuffer.Count == 0)
        {
            return;
        }

        var groupedUpdates = updateBuffer
       .GroupBy(update => update.Table)
       .ToDictionary(
           group => group.Key,
           group => group.Select(update => new TableUpdateOperation(update.OpType, update.RowId)).ToArray()
       );

        var batchedUpdate = new BatchedUpdateNotification
        {
            GroupedUpdates = groupedUpdates,
            RawUpdates = updateBuffer.ToArray(),
            Tables = groupedUpdates.Keys.ToArray()
        };

        updateBuffer.Clear();
        Emit(new DBAdapterEvent { TablesUpdated = batchedUpdate });
    }

    // Rewrites positional `?` placeholders to named @paramN parameters (which
    // Microsoft.Data.Sqlite requires) and binds the provided values in order.
    // NOTE(review): the placeholder scan is purely textual, so a literal '?'
    // inside a quoted SQL string would be miscounted/replaced - confirm callers
    // avoid such queries.
    private static void PrepareCommand(SqliteCommand command, string query, object[]? parameters)
    {
        if (parameters == null || parameters.Length == 0)
        {
            command.CommandText = query;
            return;
        }

        var parameterNames = new List<string>();

        // Count placeholders
        int placeholderCount = query.Count(c => c == '?');
        if (placeholderCount != parameters.Length)
        {
            throw new ArgumentException("Number of provided parameters does not match the number of `?` placeholders in the query.");
        }

        // Replace `?` sequentially with named parameters
        for (int i = 0; i < parameters.Length; i++)
        {
            string paramName = $"@param{i}";
            parameterNames.Add(paramName);

            int index = query.IndexOf('?');
            if (index == -1)
            {
                throw new ArgumentException("Mismatch between placeholders and parameters.");
            }

            query = string.Concat(query.Substring(0, index), paramName, query.Substring(index + 1));
        }

        command.CommandText = query;

        // Add parameters to the command
        for (int i = 0; i < parameters.Length; i++)
        {
            command.Parameters.AddWithValue(parameterNames[i], parameters[i] ?? DBNull.Value);
        }
    }

    /// <summary>
    /// Runs a non-query statement, returning rows affected and the connection's
    /// last-insert rowid.
    /// </summary>
    public async Task<NonQueryResult> Execute(string query, object[]? parameters = null)
    {
        using var command = Db.CreateCommand();
        PrepareCommand(command, query, parameters);

        int rowsAffected = await command.ExecuteNonQueryAsync();

        return new NonQueryResult
        {
            InsertId = raw.sqlite3_last_insert_rowid(Db.Handle),
            RowsAffected = rowsAffected
        };
    }


    /// <summary>
    /// Runs a query and materialises every row as a column-name -> value map.
    /// </summary>
    public async Task<QueryResult> ExecuteQuery(string query, object[]? parameters = null)
    {
        var result = new QueryResult();
        using var command = Db.CreateCommand();
        PrepareCommand(command, query, parameters);

        var rows = new List<Dictionary<string, object>>();

        using var reader = await command.ExecuteReaderAsync();

        while (await reader.ReadAsync())
        {
            var row = new Dictionary<string, object>();
            for (int i = 0; i < reader.FieldCount; i++)
            {
                // TODO: What should we do with null values?
                row[reader.GetName(i)] = reader.IsDBNull(i) ? null : reader.GetValue(i);
            }
            rows.Add(row);
        }

        result.Rows.Array = rows;
        return result;
    }

    /// <summary>
    /// Runs a query and maps every row onto <typeparamref name="T"/> by
    /// round-tripping each row dictionary through JSON.
    /// </summary>
    public async Task<T[]> GetAll<T>(string sql, object[]? parameters = null)
    {
        var result = await ExecuteQuery(sql, parameters);

        // If there are no rows, return an empty array.
        if (result.Rows.Array.Count == 0)
        {
            return [];
        }

        var items = new List<T>();

        // TODO: Improve mapping errors for when the result fields don't match the target type.
        // TODO: This conversion may be a performance bottleneck, it's the easiest mechanism for getting result typing.
        foreach (var row in result.Rows.Array)
        {
            if (row != null)
            {
                // Serialize the row to JSON and then deserialize it into type T.
                string json = JsonConvert.SerializeObject(row);
                T item = JsonConvert.DeserializeObject<T>(json)!;
                items.Add(item);
            }
        }

        return [.. items];
    }

    /// <summary>
    /// Like <see cref="GetAll{T}"/> but maps only the first row; returns default
    /// when the result set is empty.
    /// </summary>
    public async Task<T?> GetOptional<T>(string sql, object[]? parameters = null)
    {
        var result = await ExecuteQuery(sql, parameters);

        // If there are no rows, return null
        if (result.Rows.Array.Count == 0)
        {
            return default;
        }

        var firstRow = result.Rows.Array[0];

        if (firstRow == null)
        {
            return default;
        }

        // TODO: Improve mapping errors for when the result fields don't match the target type.
        // TODO: This conversion may be a performance bottleneck, it's the easiest mechanism for getting result typing.
        string json = JsonConvert.SerializeObject(firstRow);
        return JsonConvert.DeserializeObject<T>(json);
    }

    /// <summary>
    /// First row mapped to <typeparamref name="T"/>; throws when the result set
    /// is empty.
    /// </summary>
    public async Task<T> Get<T>(string sql, object[]? parameters = null)
    {
        return await GetOptional<T>(sql, parameters) ?? throw new InvalidOperationException("Result set is empty");
    }

    public new void Close()
    {
        base.Close();
        Db.Close();
    }

    // Forces SQLite to re-read schema metadata by querying sqlite_master info.
    public async Task RefreshSchema()
    {
        await Get<object>("PRAGMA table_info('sqlite_master')");
    }
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteDBOpenFactory.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteDBOpenFactory.cs
new file mode 100644
index 0000000..d0bae37
--- /dev/null
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteDBOpenFactory.cs
@@ -0,0 +1,28 @@
+namespace PowerSync.Common.MDSQLite;
+
+using PowerSync.Common.Client;
+using PowerSync.Common.DB;
+
// SQLOpenOptions extended with optional Microsoft.Data.Sqlite PRAGMA tuning.
public class MDSQLiteOpenFactoryOptions : SQLOpenOptions
{
    // Optional SQLite settings; null means the library defaults are used.
    public MDSQLiteOptions? SqliteOptions { get; set; }
}
+
/// <summary>
/// ISQLOpenFactory that opens a Microsoft.Data.Sqlite backed database adapter
/// for the configured database file.
/// </summary>
public class MDSqliteDBOpenFactory(MDSQLiteOpenFactoryOptions options) : ISQLOpenFactory
{
    private readonly MDSQLiteOpenFactoryOptions options = options;

    /// <summary>Creates a new adapter using this factory's filename and SQLite options.</summary>
    public IDBAdapter OpenDatabase() => new MDSQLiteAdapter(new MDSQLiteAdapterOptions
    {
        Name = options.DbFilename,
        SqliteOptions = options.SqliteOptions
    });
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs
new file mode 100644
index 0000000..f44f708
--- /dev/null
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs
@@ -0,0 +1,137 @@
+namespace PowerSync.Common.MDSQLite;
+
/// <summary>
/// Where SQLite keeps temporary tables and indices (used for PRAGMA temp_store).
/// </summary>
public sealed class TemporaryStorageOption
{
    public static readonly TemporaryStorageOption MEMORY = new("memory");
    public static readonly TemporaryStorageOption FILESYSTEM = new("file");

    /// <summary>The literal value used in the PRAGMA statement.</summary>
    public string Value { get; }

    private TemporaryStorageOption(string value)
    {
        Value = value;
    }

    public override string ToString()
    {
        return Value;
    }

    public static implicit operator string(TemporaryStorageOption option)
    {
        return option.Value;
    }
}
+
/// <summary>
/// SQLite journal mode. Set on the primary connection.
/// This library is written with WAL mode in mind - other modes may cause
/// unexpected locking behavior.
/// </summary>
public sealed class SqliteJournalMode
{
    /// <summary>
    /// Use a write-ahead log instead of a rollback journal.
    /// This provides good performance and concurrency.
    /// </summary>
    public static readonly SqliteJournalMode WAL = new("WAL");
    public static readonly SqliteJournalMode DELETE = new("DELETE");
    public static readonly SqliteJournalMode TRUNCATE = new("TRUNCATE");
    public static readonly SqliteJournalMode PERSIST = new("PERSIST");
    public static readonly SqliteJournalMode MEMORY = new("MEMORY");
    public static readonly SqliteJournalMode OFF = new("OFF");

    /// <summary>The literal value used in the PRAGMA statement.</summary>
    public string Value { get; }

    private SqliteJournalMode(string value)
    {
        Value = value;
    }

    public override string ToString()
    {
        return Value;
    }

    public static implicit operator string(SqliteJournalMode mode)
    {
        return mode.Value;
    }
}
+
/// <summary>
/// SQLite file commit mode (used for PRAGMA synchronous).
/// </summary>
public sealed class SqliteSynchronous
{
    public static readonly SqliteSynchronous NORMAL = new("NORMAL");
    // Consistency fix: was lowercase "full" while the sibling modes were
    // uppercase. SQLite treats PRAGMA values case-insensitively, so the emitted
    // statement behaves identically.
    public static readonly SqliteSynchronous FULL = new("FULL");
    public static readonly SqliteSynchronous OFF = new("OFF");

    /// <summary>The literal value used in the PRAGMA statement.</summary>
    public string Value { get; }
    private SqliteSynchronous(string value) => Value = value;
    public override string ToString() => Value;
    public static implicit operator string(SqliteSynchronous mode) => mode.Value;
}
+
+
// A dynamically-loadable SQLite extension: library path plus optional entry point.
public class SqliteExtension
{
    public string Path { get; set; } = string.Empty;
    // Entry point symbol; null lets SQLite derive it from the file name.
    public string? EntryPoint { get; set; }
}
+
/// <summary>
/// Optional SQLite tuning applied when opening connections. Any property left
/// null falls back to the library defaults in RequiredMDSQLiteOptions.
/// </summary>
public class MDSQLiteOptions
{
    /// <summary>
    /// SQLite journal mode. Defaults to WAL.
    /// </summary>
    public SqliteJournalMode? JournalMode { get; set; }

    /// <summary>
    /// SQLite synchronous flag. Defaults to NORMAL, which is safe for WAL mode.
    /// </summary>
    public SqliteSynchronous? Synchronous { get; set; }

    /// <summary>
    /// Journal/WAL size limit. Defaults to 6MB.
    /// </summary>
    public int? JournalSizeLimit { get; set; }

    /// <summary>
    /// Timeout in milliseconds waiting for locks to be released by other connections.
    /// Defaults to 30 seconds.
    /// </summary>
    public int? LockTimeoutMs { get; set; }

    /// <summary>
    /// Encryption key for the database.
    /// If set, the database will be encrypted using SQLCipher.
    /// </summary>
    public string? EncryptionKey { get; set; }

    /// <summary>
    /// Where to store SQLite temporary files. Defaults to MEMORY.
    /// </summary>
    public TemporaryStorageOption? TemporaryStorage { get; set; }

    /// <summary>
    /// Maximum SQLite cache size. Defaults to 50MB.
    /// </summary>
    public int? CacheSizeKb { get; set; }

    /// <summary>
    /// Load extensions using the path and entryPoint.
    /// </summary>
    public SqliteExtension[]? Extensions { get; set; }
}
+
+
/// <summary>
/// Fully-resolved SQLite options: every setting has a concrete value once the
/// defaults have been applied. The `new` members intentionally shadow the
/// nullable properties on MDSQLiteOptions with non-nullable versions.
/// </summary>
public class RequiredMDSQLiteOptions : MDSQLiteOptions
{
    // readonly fix: the shared defaults instance is read by option resolution and
    // must not be swapped out at runtime (previously a mutable static field).
    public static readonly RequiredMDSQLiteOptions DEFAULT_SQLITE_OPTIONS = new()
    {
        JournalMode = SqliteJournalMode.WAL,
        Synchronous = SqliteSynchronous.NORMAL,
        JournalSizeLimit = 6 * 1024 * 1024,
        CacheSizeKb = 50 * 1024,
        TemporaryStorage = TemporaryStorageOption.MEMORY,
        LockTimeoutMs = 30000,
        EncryptionKey = null,
        Extensions = []
    };

    public new SqliteJournalMode JournalMode { get; set; } = null!;

    public new SqliteSynchronous Synchronous { get; set; } = null!;

    public new int JournalSizeLimit { get; set; }

    public new int LockTimeoutMs { get; set; }

    public new string? EncryptionKey { get; set; }

    public new TemporaryStorageOption TemporaryStorage { get; set; } = null!;

    public new int CacheSizeKb { get; set; }

    public new SqliteExtension[] Extensions { get; set; } = null!;
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/PowerSync.Common.csproj b/PowerSync/PowerSync.Common/PowerSync.Common.csproj
new file mode 100644
index 0000000..3c85f7e
--- /dev/null
+++ b/PowerSync/PowerSync.Common/PowerSync.Common.csproj
@@ -0,0 +1,27 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFrameworks>netstandard2.0;net6.0;net8.0</TargetFrameworks>
+    <LangVersion>12</LangVersion>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+    <VersionPrefix>0.0.1</VersionPrefix>
+    <VersionSuffix>alpha</VersionSuffix>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="Microsoft.Data.Sqlite" Version="9.0.1" />
+    <PackageReference Include="Microsoft.Extensions.Logging" Version="6.0.0" />
+    <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="6.0.0" />
+    <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
+    <PackageReference Include="Nito.AsyncEx" Version="5.1.2" />
+    <PackageReference Include="System.Threading.Channels" Version="8.0.0" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <Content Include="runtimes\**\*.*">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </Content>
+  </ItemGroup>
+
+</Project>
diff --git a/PowerSync/PowerSync.Common/README.md b/PowerSync/PowerSync.Common/README.md
new file mode 100644
index 0000000..186b3ed
--- /dev/null
+++ b/PowerSync/PowerSync.Common/README.md
@@ -0,0 +1,48 @@
+# PowerSync SDK .NET Common 
+
+This package contains a .NET implementation of a PowerSync database connector and streaming sync bucket implementation.
+
+## Usage 
+
+### Simple Query
+
+```csharp
+
+private record ListResult(string id, string name, string owner_id, string created_at);
+
+static async Task Main() {
+
+    var db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+        Database = new SQLOpenOptions { DbFilename = "cli-example.db" },
+        Schema = AppSchema.PowerSyncSchema,
+    });
+    await db.Init();
+
+    var lists = await db.GetAll<ListResult>("select * from lists");
+}
+
+```
+
+
+### Watched queries
+Watched queries will automatically update when a dependent table is updated.
+
+```csharp
+db.Watch("select * from lists", null, new WatchHandler<ListResult>
+{
+    OnResult = (results) =>
+    {
+        table.Rows.Clear();
+        foreach (var line in results)
+        {
+            table.AddRow(line.id, line.name, line.owner_id, line.created_at);
+        }
+    },
+    OnError = (error) =>
+    {
+        Console.WriteLine("Error: " + error.Message);
+    }
+});
+
+```
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Utils/EventStream.cs b/PowerSync/PowerSync.Common/Utils/EventStream.cs
new file mode 100644
index 0000000..38afd48
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Utils/EventStream.cs
@@ -0,0 +1,159 @@
+namespace PowerSync.Common.Utils;
+
+using System.Collections.Concurrent;
+using System.Runtime.CompilerServices;
+using System.Threading.Channels;
+
/// <summary>
/// A multi-subscriber event stream: values passed to Emit are delivered to
/// every currently attached listener.
/// </summary>
public interface IEventStream<T>
{
    /// <summary>Queues <paramref name="item"/> to all listeners without blocking.</summary>
    void Emit(T item);

    /// <summary>Writes <paramref name="item"/> to all listeners, awaiting each write.</summary>
    Task EmitAsync(T item);

    /// <summary>Runs <paramref name="callback"/> for each value on a background task;
    /// cancel the returned source to stop listening.</summary>
    CancellationTokenSource RunListenerAsync(
    Func<T, Task> callback);

    /// <summary>Async stream of values until <paramref name="cancellationToken"/> fires.</summary>
    IAsyncEnumerable<T> ListenAsync(CancellationToken cancellationToken);

    /// <summary>Synchronous counterpart of <see cref="RunListenerAsync"/>.</summary>
    CancellationTokenSource RunListener(Action<T> callback);

    /// <summary>Blocking stream of values until <paramref name="cancellationToken"/> fires.</summary>
    IEnumerable<T> Listen(CancellationToken cancellationToken);

    /// <summary>Completes all listener channels and detaches them.</summary>
    void Close();
}
+
/// <summary>
/// Multi-subscriber event stream built on unbounded channels: each listener gets
/// its own channel and Emit fans values out to all of them.
/// </summary>
public class EventStream<T> : IEventStream<T>
{

    // Closest implementation to a ConcurrentSet<T> in .Net
    private readonly ConcurrentDictionary<Channel<T>, byte> subscribers = new();

    /// <summary>Number of currently attached listeners.</summary>
    public int SubscriberCount()
    {
        return subscribers.Count;
    }

    /// <summary>Delivers <paramref name="item"/> to every subscriber without blocking.</summary>
    public void Emit(T item)
    {
        foreach (var subscriber in subscribers.Keys)
        {
            // Unbounded channel: TryWrite only fails if the channel was completed.
            subscriber.Writer.TryWrite(item);
        }
    }

    public async Task EmitAsync(T item)
    {
        foreach (var subscriber in subscribers.Keys)
        {
            await subscriber.Writer.WriteAsync(item);
        }
    }

    public CancellationTokenSource RunListenerAsync(
    Func<T, Task> callback)
    {
        var cts = new CancellationTokenSource();

        _ = Task.Run(async () =>
        {
            await foreach (var value in ListenAsync(cts.Token))
            {
                await callback(value);
            }

        }, cts.Token);

        return cts;
    }

    public IAsyncEnumerable<T> ListenAsync(CancellationToken cancellationToken)
    {
        // The subscriber is registered immediately, before enumeration starts.
        var channel = Channel.CreateUnbounded<T>();
        subscribers.TryAdd(channel, 0);
        return ReadFromChannelAsync(channel, cancellationToken);
    }

    public CancellationTokenSource RunListener(Action<T> callback)
    {
        var cts = new CancellationTokenSource();

        _ = Task.Run(() =>
        {
            foreach (var value in Listen(cts.Token))
            {
                callback(value);
            }
        }, cts.Token);

        return cts;
    }

    public IEnumerable<T> Listen(CancellationToken cancellationToken)
    {
        var channel = Channel.CreateUnbounded<T>();
        subscribers.TryAdd(channel, 0);
        return ReadFromChannel(channel, cancellationToken);
    }

    private async IAsyncEnumerable<T> ReadFromChannelAsync(
    Channel<T> channel,
    [EnumeratorCancellation] CancellationToken cancellationToken)
    {
        try
        {
            // .Net 4.8 friendly way of reading from the channel
            while (true)
            {
                bool ready;
                try
                {
                    // False once the channel is completed (stream closed).
                    ready = await channel.Reader.WaitToReadAsync(cancellationToken);
                }
                catch (OperationCanceledException)
                {
                    // Fix: cancellation ends the enumeration cleanly instead of
                    // surfacing an exception from the listener loop.
                    ready = false;
                }

                if (!ready)
                {
                    yield break;
                }

                while (channel.Reader.TryRead(out var item))
                {
                    yield return item;

                    // Check cancellation between iterations
                    if (cancellationToken.IsCancellationRequested)
                    {
                        yield break;
                    }
                }
            }
        }
        finally
        {
            RemoveSubscriber(channel);
        }
    }

    private IEnumerable<T> ReadFromChannel(Channel<T> channel, CancellationToken cancellationToken)
    {
        try
        {
            while (!cancellationToken.IsCancellationRequested)
            {
                bool ready;
                try
                {
                    ready = channel.Reader.WaitToReadAsync(cancellationToken).AsTask().Result;
                }
                catch (AggregateException ex) when (ex.InnerException is OperationCanceledException)
                {
                    // Fix: cancellation while blocked no longer throws out of the
                    // listener loop.
                    ready = false;
                }

                if (!ready)
                {
                    // Fix: once the channel completes (Close() was called),
                    // WaitToReadAsync returns false forever - the original kept
                    // looping here, busy-spinning at 100% CPU. Stop instead.
                    yield break;
                }

                while (channel.Reader.TryRead(out var item))
                {
                    yield return item;
                }
            }
        }
        finally
        {
            RemoveSubscriber(channel);
        }
    }

    public void Close()
    {
        foreach (var subscriber in subscribers.Keys)
        {
            subscriber.Writer.TryComplete();
            RemoveSubscriber(subscriber);
        }
    }

    private void RemoveSubscriber(Channel<T> channel)
    {
        subscribers.TryRemove(channel, out _);
    }
}
\ No newline at end of file
diff --git a/PowerSync/PowerSync.Common/Utils/PowerSyncPathResolver.cs b/PowerSync/PowerSync.Common/Utils/PowerSyncPathResolver.cs
new file mode 100644
index 0000000..3e203d7
--- /dev/null
+++ b/PowerSync/PowerSync.Common/Utils/PowerSyncPathResolver.cs
@@ -0,0 +1,59 @@
+namespace PowerSync.Common.Utils;
+
+using System.Runtime.InteropServices;
+
/// <summary>
/// Resolves the on-disk location of the bundled PowerSync native SQLite
/// extension for the current OS and process architecture.
/// </summary>
public static class PowerSyncPathResolver
{
    /// <summary>
    /// Builds "&lt;packagePath&gt;/runtimes/&lt;rid&gt;/native/&lt;library&gt;" for the platform
    /// the process is currently running on.
    /// </summary>
    public static string GetNativeLibraryPath(string packagePath) =>
        Path.Combine(packagePath, "runtimes", GetRuntimeIdentifier(), "native", GetFileNameForPlatform());

    // Maps the current OS + architecture onto a .NET runtime identifier.
    private static string GetRuntimeIdentifier()
    {
        bool isArm64 = RuntimeInformation.ProcessArchitecture == Architecture.Arm64;

        if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
        {
            return isArm64 ? "osx-arm64" : "osx-x64";
        }

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
        {
            return isArm64 ? "linux-arm64" : "linux-x64";
        }

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            return "win-x64";
        }

        throw new PlatformNotSupportedException("Unsupported platform.");
    }

    // Native library file name per OS naming convention.
    private static string GetFileNameForPlatform()
    {
        if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
        {
            return "libpowersync.dylib";
        }

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
        {
            return "libpowersync.so";
        }

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            return "powersync.dll";
        }

        throw new PlatformNotSupportedException("Unsupported platform.");
    }
}
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..ac6e360
--- /dev/null
+++ b/README.md
@@ -0,0 +1,57 @@
+<p align="center">
+  <a href="https://www.powersync.com" target="_blank"><img src="https://github.com/powersync-ja/.github/assets/7372448/d2538c43-c1a0-4c47-9a76-41462dba484f"/></a>
+</p>
+
+_[PowerSync](https://www.powersync.com) is a sync engine for building local-first apps with instantly-responsive UI/UX and simplified state transfer. Syncs between SQLite on the client-side and Postgres, MongoDB or MySQL on the server-side._
+
+# PowerSync .NET SDKs
+
+`powersync-dotnet` is the monorepo for PowerSync .NET SDKs.
+
+## Monorepo Structure: Packages
+
+- [PowerSync/Common](./PowerSync/Common/README.md)
+
+  - Core package: .NET implementation of a PowerSync database connector and streaming sync bucket implementation. Packages meant for specific platforms will extend functionality of `Common`.
+
+## Demo Apps / Example Projects
+
+Demo applications are located in the [`demos/`](./demos/) directory. Also see our [Demo Apps / Example Projects](https://docs.powersync.com/resources/demo-apps-example-projects) gallery which lists all projects by the backend and client-side framework they use.
+
+### Command-Line
+
+- [demos/Command-Line/CLI](./demos/Command-Line/CLI/README.md): A CLI to-do list example app using a Node.js backend.
+
+# Supported Frameworks
+
+This monorepo currently targets the following .NET versions:
+- **.NET 8** ([primary target](https://dotnet.microsoft.com/en-us/download/dotnet/8.0), recommended for all new projects)
+- **.NET 6** (supported for compatibility with older projects)
+- **.NET Standard 2.0** (for compatibility with older libraries and frameworks)
+
+We are also actively working towards adding support for **.NET Framework 4.8** to enable compatibility with legacy applications.
+
+When running commands such as `dotnet run` or `dotnet test`, you may need to specify the target framework explicitly using the `--framework` flag.
+
+# Development
+
+Install dependencies
+
+```bash
+dotnet restore
+```
+
+## Tests
+
+Run all tests
+
+```bash
+dotnet test -v n --framework net8.0
+```
+
+Run a specific test
+
+```bash
+dotnet test -v n --framework net8.0 --filter "test-file-pattern"  
+```
+
diff --git a/Tests/PowerSync/.DS_Store b/Tests/PowerSync/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..58bdd5a41c80dc73d0872ac081b3828c53c50d9a
GIT binary patch
literal 6148
zcmeHKJ5Iw;5S)b+k)TLP`A&eu4W2?cK{x;fhz1;q5NcPPD@V)hM~KLwprAps*6zHw
z-g%a%@OlB*a`$)xEC9^uj`;L2HNWpZv9rn;k<K$V7;ueyJYhSjKAmvxC2q-l<@KHa
zj_t?#ZFt!X&w2jo1C*5lQa}nw0VyB_ex-o-UfTR3QBev=0V(jMfPWtv-LV&riSg-R
zh!KD|V>*oMm?enK6U1IPCNe{_q!N>AHDXxOnQv9s3&+Hy!)o}jda~7oV)1mI-=ZAW
z6BVU^6gXDkJhuz)|9A8s=Ko`oc2YnJ{3``)wpy>2e5LBGlb7>e+vpE;ui4YxxDE<K
mv}0nlV{W`1Uqw;YHDB|7FB}tt&V0~``WbLtWK!U-6*vIhE)~@P

literal 0
HcmV?d00001

diff --git a/Tests/PowerSync/PowerSync.Common.Tests/BucketStorageTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/BucketStorageTests.cs
new file mode 100644
index 0000000..a33b240
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/BucketStorageTests.cs
@@ -0,0 +1,1049 @@
+namespace PowerSync.Common.Tests;
+
+using System.Threading.Tasks;
+
+using Microsoft.Data.Sqlite;
+using Microsoft.Extensions.Logging;
+
+using PowerSync.Common.Client;
+using PowerSync.Common.Client.Sync.Bucket;
+using PowerSync.Common.DB.Schema;
+
+// Static fixture data shared by the bucket storage tests below.
+// Op checksums are small integers so that the expected bucket checksums in
+// the tests can be derived by summing the checksums of the bucket's ops.
+class TestData
+{
+    // PUT assets/O1 (description "bar") at op 1.
+    public static OplogEntry putAsset1_1 = OplogEntry.FromRow(new OplogEntryJSON
+    {
+        OpId = "1",
+        Op = new OpType(OpTypeEnum.PUT).ToJSON(),
+        ObjectType = "assets",
+        ObjectId = "O1",
+        Data = new { description = "bar" },
+        Checksum = 1
+    });
+
+    // PUT assets/O2 (description "bar") at op 2.
+    public static OplogEntry putAsset2_2 = OplogEntry.FromRow(new OplogEntryJSON
+    {
+        OpId = "2",
+        Op = new OpType(OpTypeEnum.PUT).ToJSON(),
+        ObjectType = "assets",
+        ObjectId = "O2",
+        Data = new { description = "bar" },
+        Checksum = 2
+    });
+
+    // PUT assets/O1 (description "bard") at op 3 — supersedes putAsset1_1.
+    public static OplogEntry putAsset1_3 = OplogEntry.FromRow(new OplogEntryJSON
+    {
+        OpId = "3",
+        Op = new OpType(OpTypeEnum.PUT).ToJSON(),
+        ObjectType = "assets",
+        ObjectId = "O1",
+        Data = new { description = "bard" },
+        Checksum = 3
+    });
+
+    // REMOVE assets/O1 at op 4.
+    public static OplogEntry removeAsset1_4 = OplogEntry.FromRow(new OplogEntryJSON
+    {
+        OpId = "4",
+        Op = new OpType(OpTypeEnum.REMOVE).ToJSON(),
+        ObjectType = "assets",
+        ObjectId = "O1",
+        Checksum = 4
+    });
+
+    // REMOVE assets/O1 at op 5.
+    public static OplogEntry removeAsset1_5 = OplogEntry.FromRow(new OplogEntryJSON
+    {
+        OpId = "5",
+        Op = new OpType(OpTypeEnum.REMOVE).ToJSON(),
+        ObjectType = "assets",
+        ObjectId = "O1",
+        Checksum = 5
+    });
+
+    // "assets" table definition with a composite index on (make, model).
+    public static Table assets = new Table(new Dictionary<string, ColumnType>
+        {
+            { "created_at", ColumnType.TEXT },
+            { "make", ColumnType.TEXT },
+            { "model", ColumnType.TEXT },
+            { "serial_number", ColumnType.TEXT },
+            { "quantity", ColumnType.INTEGER },
+            { "user_id", ColumnType.TEXT },
+            { "customer_id", ColumnType.TEXT },
+            { "description", ColumnType.TEXT },
+        }, new TableOptions
+        {
+            Indexes = new Dictionary<string, List<string>> { { "makemodel", new List<string> { "make", "model" } } }
+        });
+
+    // "customers" table definition (no indexes).
+    public static Table customers = new Table(new Dictionary<string, ColumnType>
+        {
+            { "name", ColumnType.TEXT },
+            { "email", ColumnType.TEXT }
+        });
+
+
+    // Schema containing both tables; used as the default schema in these tests.
+    public static Schema appSchema = new Schema(new Dictionary<string, Table>
+        {
+            { "assets", assets },
+            { "customers", customers }
+        });
+
+}
+
+// Exercises SqliteBucketStorage against a real on-disk SQLite database.
+// xUnit's IAsyncLifetime supplies per-test async setup (InitializeAsync)
+// and teardown (DisposeAsync).
+public class BucketStorageTests : IAsyncLifetime
+{
+    private PowerSyncDatabase db = default!;                 // created fresh in InitializeAsync
+    private IBucketStorageAdapter bucketStorage = default!;  // adapter under test
+
+
+    // Creates a fresh PowerSync database and a bucket storage adapter for each test.
+    public async Task InitializeAsync()
+    {
+        var options = new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = "powersync.db" },
+            Schema = TestData.appSchema,
+        };
+        db = new PowerSyncDatabase(options);
+        await db.Init();
+        bucketStorage = new SqliteBucketStorage(db.Database, createLogger());
+    }
+
+    // Per-test teardown: clears synced data, then releases the database
+    // connection and the storage adapter.
+    public async Task DisposeAsync()
+    {
+        await db.DisconnectAndClear();
+        await db.Close();
+        bucketStorage.Close();
+    }
+
+    // Row-projection records used with Get/GetAll; the property names must
+    // match the column names selected in the SQL.
+    private record IdResult(string id);
+    private record DescriptionResult(string description);
+    private record AssetResult(string id, string description, string? make = null);
+
+    // Asserts that asset O1 reflects TestData.putAsset1_3: description "bard", no make.
+    static async Task ExpectAsset1_3(PowerSyncDatabase database)
+    {
+        var rows = await database.GetAll<AssetResult>("SELECT id, description, make FROM assets WHERE id = 'O1'");
+        Assert.Equal(new AssetResult("O1", "bard", null), rows[0]);
+    }
+
+    // Asserts that asset O1 is absent from the local database.
+    static async Task ExpectNoAsset1(PowerSyncDatabase database)
+    {
+        var rows = await database.GetAll<AssetResult>("SELECT id, description, make FROM assets WHERE id = 'O1'");
+        Assert.Empty(rows);
+    }
+
+    // Asserts that the assets table contains no rows at all.
+    static async Task ExpectNoAssets(PowerSyncDatabase database)
+    {
+        var rows = await database.GetAll<AssetResult>("SELECT id, description, make FROM assets");
+        Assert.Empty(rows);
+    }
+
+    // Applies the checkpoint via SyncLocalDatabase and asserts it completed
+    // with a valid checkpoint and a ready local database.
+    async Task SyncLocalChecked(Checkpoint checkpoint)
+    {
+        var syncResult = await bucketStorage.SyncLocalDatabase(checkpoint);
+        Assert.Equal(new SyncLocalDatabaseResult { Ready = true, CheckpointValid = true }, syncResult);
+    }
+
+    // Builds a console-backed logger at Debug level for the storage adapter.
+    private ILogger createLogger()
+    {
+        var factory = LoggerFactory.Create(logging =>
+        {
+            logging.AddConsole();
+            logging.SetMinimumLevel(LogLevel.Debug);
+        });
+        return factory.CreateLogger("TestLogger");
+    }
+
+    // Smoke test: saving a batch records bucket state with the last op_id,
+    // and a checked sync makes the data queryable locally.
+    [Fact]
+    public async Task BasicSetup()
+    {
+        await db.WaitForReady();
+        var initialBucketStates = await bucketStorage.GetBucketStates();
+        Assert.Empty(initialBucketStates);
+
+        await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)]));
+
+        var bucketStates = await bucketStorage.GetBucketStates();
+
+        Assert.Collection(bucketStates, state =>
+        {
+            Assert.Equal("bucket1", state.Bucket);
+            Assert.Equal("3", state.OpId);
+        });
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            // Checksum 6 = 1 + 2 + 3 (sum of the three op checksums).
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        await ExpectAsset1_3(db);
+    }
+
+    // The same object may be synced via multiple buckets; it must still
+    // resolve to a single local row.
+    [Fact]
+    public async Task ShouldGetObjectFromMultipleBuckets()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 3 }]
+        });
+
+        await ExpectAsset1_3(db);
+    }
+
+    // Conflicting versions of one object across buckets: the op with the
+    // largest op_id must win.
+    [Fact]
+    public async Task ShouldPrioritizeLaterUpdates()
+    {
+        // Test behavior when the same object is present in multiple buckets.
+        // In this case, there are two different versions in the different buckets.
+        // While we should not get this with our server implementation, the client still specifies this behavior:
+        // The largest op_id wins.
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_1], false)])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }]
+        });
+
+        await ExpectAsset1_3(db);
+    }
+
+    // A REMOVE in one bucket must not delete an object that another bucket
+    // still PUTs.
+    [Fact]
+    public async Task ShouldIgnoreRemoveFromOneBucket()
+    {
+        // When we have 1 PUT and 1 REMOVE, the object must be kept.
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "4",
+            // bucket2 checksum 7 = 3 (put) + 4 (remove).
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }]
+        });
+
+        await ExpectAsset1_3(db);
+    }
+
+    // Once every bucket's latest op for an object is a REMOVE, the local row
+    // must be deleted.
+    [Fact]
+    public async Task ShouldRemoveWhenRemovedFromAllBuckets()
+    {
+        // When we only have REMOVE left for an object, it must be deleted.
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3, TestData.removeAsset1_5], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3, TestData.removeAsset1_4], false)])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "5",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }, new BucketChecksum { Bucket = "bucket2", Checksum = 7 }]
+        });
+
+        await ExpectNoAssets(db);
+    }
+
+    // Ops with a distinct Subkey are separate oplog entities but map to the
+    // same local row; removing the subkeyed entity restores the base version.
+    [Fact]
+    public async Task ShouldUseSubkeys()
+    {
+        // Subkeys cause this to be treated as a separate entity in the oplog,
+        // but the same entity in the local database.
+
+        var put4 = OplogEntry.FromRow(new OplogEntryJSON
+        {
+            OpId = "4",
+            Op = new OpType(OpTypeEnum.PUT).ToJSON(),
+            Subkey = "b",
+            ObjectType = "assets",
+            ObjectId = "O1",
+            Data = new { description = "B" },
+            Checksum = 4
+        });
+
+        var remove5 = OplogEntry.FromRow(new OplogEntryJSON
+        {
+            OpId = "5",
+            Op = new OpType(OpTypeEnum.REMOVE).ToJSON(),
+            Subkey = "b",
+            ObjectType = "assets",
+            ObjectId = "O1",
+            Checksum = 5
+        });
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3, put4], false)])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "4",
+            // 8 = 1 + 3 + 4.
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 8 }]
+        });
+
+        // The subkeyed PUT (op 4, description "B") wins over op 3 ("bard").
+        var result = await db.GetAll<AssetResult>("SELECT id, description, make FROM assets WHERE id = 'O1'");
+        Assert.Equal(new AssetResult("O1", "B", null), result[0]);
+
+        await bucketStorage.SaveSyncData(new SyncDataBatch([new SyncDataBucket("bucket1", [remove5], false)]));
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "5",
+            // 13 = 1 + 3 + 4 + 5.
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 13 }]
+        });
+
+        await ExpectAsset1_3(db);
+    }
+
+    // A checkpoint whose bucket checksums disagree with the stored ops must
+    // fail validation, report the failing buckets, and apply nothing locally.
+    [Fact]
+    public async Task ShouldFailChecksumValidation()
+    {
+        // Simple checksum validation
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)])
+        );
+
+        // bucket1's real checksum is 6 (not 10); bucket2 has no data at all.
+        var result = await bucketStorage.SyncLocalDatabase(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 10 }, new BucketChecksum { Bucket = "bucket2", Checksum = 1 }]
+        });
+
+        var expected = new SyncLocalDatabaseResult
+        {
+            Ready = false,
+            CheckpointValid = false,
+            CheckpointFailures = ["bucket1", "bucket2"]
+        };
+
+        Assert.Equal(expected, result);
+
+        await ExpectNoAssets(db);
+    }
+
+    // Removing a bucket only takes effect on the next local sync; objects
+    // survive while at least one other bucket still contains them.
+    [Fact]
+    public async Task ShouldDeleteBuckets()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [new SyncDataBucket("bucket1", [TestData.putAsset1_3], false), new SyncDataBucket("bucket2", [TestData.putAsset1_3], false)])
+        );
+
+        await bucketStorage.RemoveBuckets(["bucket2"]);
+        // The delete only takes effect after syncLocal.
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 3 }]
+        });
+
+        // Bucket is deleted, but object is still present in other buckets.
+        await ExpectAsset1_3(db);
+
+        await bucketStorage.RemoveBuckets(["bucket1"]);
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets = []
+        });
+
+        // Both buckets deleted - object removed.
+        await ExpectNoAssets(db);
+    }
+
+    // A bucket may be deleted and re-created repeatedly; only the ops from
+    // the final incarnation count towards state and checksums.
+    [Fact]
+    public async Task ShouldDeleteAndRecreateBuckets()
+    {
+        // Save some data
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1], false)])
+        );
+
+        // Delete the bucket
+        await bucketStorage.RemoveBuckets(["bucket1"]);
+
+        // Save some data again
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)])
+        );
+
+        // Delete again
+        await bucketStorage.RemoveBuckets(["bucket1"]);
+
+        // Final save of data
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset1_3], false)])
+        );
+
+        // Check that the data is there
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            // 4 = 1 + 3, ops of the final incarnation only.
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }]
+        });
+
+        await ExpectAsset1_3(db);
+
+        // Now final delete
+        await bucketStorage.RemoveBuckets(["bucket1"]);
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets = []
+        });
+
+        await ExpectNoAssets(db);
+    }
+
+    // MOVE ops carry no data but still contribute to the bucket checksum.
+    [Fact]
+    public async Task ShouldHandleMove()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1",
+                [
+                    OplogEntry.FromRow(new OplogEntryJSON
+                    {
+                        OpId = "1",
+                        Op = new OpType(OpTypeEnum.MOVE).ToJSON(),
+                        Checksum = 1
+                    })
+                ], false)
+            ])
+        );
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch([new SyncDataBucket("bucket1", [TestData.putAsset1_3], false)])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            // 4 = 1 (move) + 3 (put).
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 4 }]
+        });
+
+        await ExpectAsset1_3(db);
+    }
+
+    // A CLEAR op discards all earlier ops in the bucket (their data and their
+    // checksum contribution); only ops after the CLEAR remain.
+    [Fact]
+    public async Task ShouldHandleClear()
+    {
+        // Save some data
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "1",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 1 }
+        ]
+        });
+
+        // CLEAR, then save new data
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1",
+                [
+                    OplogEntry.FromRow(new OplogEntryJSON
+                    {
+                        OpId = "2",
+                        Op = new OpType(OpTypeEnum.CLEAR).ToJSON(),
+                        Checksum = 2
+                    }),
+                    OplogEntry.FromRow(new OplogEntryJSON
+                    {
+                        OpId = "3",
+                        Op = new OpType(OpTypeEnum.PUT).ToJSON(),
+                        Checksum = 3,
+                        Data = TestData.putAsset2_2.Data,
+                        ObjectId = TestData.putAsset2_2.ObjectId,
+                        ObjectType = TestData.putAsset2_2.ObjectType
+                    })
+                ], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            // 2 + 3. 1 is replaced with 2.
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 5 }]
+        });
+
+        // O1 was only in the cleared portion of the bucket, so it is gone.
+        await ExpectNoAsset1(db);
+
+        var result = await db.Get<AssetResult>("SELECT id, description FROM assets WHERE id = 'O2'");
+
+        Assert.Equal(new AssetResult("O2", "bar"), result);
+    }
+
+    // Verifies that data synced while its table is absent from the schema
+    // becomes queryable once the database is reopened with a schema that
+    // defines the table.
+    [Fact]
+    public async Task UpdateWithNewTypes()
+    {
+        var dbName = "test-bucket-storage-new-types.db";
+        var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = dbName },
+            Schema = new Schema([]),
+        });
+        await powersync.Init();
+
+        // Release the adapter created in InitializeAsync before re-pointing
+        // the field at the new database, so the old adapter is not leaked.
+        bucketStorage.Close();
+        bucketStorage = new SqliteBucketStorage(powersync.Database);
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "4",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        // Ensure an exception is thrown due to missing table
+        await Assert.ThrowsAsync<SqliteException>(async () =>
+            await powersync.GetAll<AssetResult>("SELECT * FROM assets"));
+
+        await powersync.Close();
+
+        powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = dbName },
+            Schema = TestData.appSchema,
+        });
+        await powersync.Init();
+
+        await ExpectAsset1_3(powersync);
+
+        await powersync.DisconnectAndClear();
+        await powersync.Close();
+    }
+
+    // Verifies that removing a table from the schema hides (but does not
+    // delete) its synced data, and that reopening with the original schema
+    // makes the data visible again.
+    [Fact]
+    public async Task ShouldRemoveTypes()
+    {
+        var dbName = "test-bucket-storage-remove-types.db";
+
+        // Create database with initial schema
+        var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = dbName },
+            Schema = TestData.appSchema,
+        });
+        await powersync.Init();
+
+        // Release the adapter created in InitializeAsync before re-pointing
+        // the field at the new database, so the old adapter is not leaked.
+        bucketStorage.Close();
+        bucketStorage = new SqliteBucketStorage(powersync.Database);
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        await ExpectAsset1_3(powersync);
+        await powersync.Close();
+
+        // Now open another instance with an empty schema
+        powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = dbName },
+            Schema = new Schema([]),
+        });
+        await powersync.Init();
+
+        // The assets view no longer exists, so querying it must fail.
+        await Assert.ThrowsAsync<SqliteException>(async () =>
+            await powersync.Execute("SELECT * FROM assets"));
+
+        await powersync.Close();
+
+        // Reopen database with the original schema
+        powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = dbName },
+            Schema = TestData.appSchema,
+        });
+        await powersync.Init();
+
+        await ExpectAsset1_3(powersync);
+
+        await powersync.DisconnectAndClear();
+        await powersync.Close();
+    }
+
+    // Projection of per-row oplog counts, used to inspect compaction results.
+    private record OplogStats(string Type, string Id, int Count);
+
+    // After ForceCompact, superseded/removed ops are collapsed: only one
+    // oplog row (for the surviving object O2) remains.
+    [Fact]
+    public async Task ShouldCompact()
+    {
+        // Test compacting behavior.
+        // This test relies heavily on internals and will have to be updated when the compact implementation is updated.
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.removeAsset1_4], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "4",
+            WriteCheckpoint = "4",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }]
+        });
+
+        await bucketStorage.ForceCompact();
+
+        // The same checkpoint must still validate after compaction.
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "4",
+            WriteCheckpoint = "4",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 7 }]
+        });
+
+        var stats = await db.GetAll<OplogStats>(
+            "SELECT row_type as Type, row_id as Id, count(*) as Count FROM ps_oplog GROUP BY row_type, row_id ORDER BY row_type, row_id"
+        );
+
+        var expectedStats = new List<OplogStats> { new("assets", "O2", 1) };
+
+        Assert.Equal(expectedStats, stats);
+    }
+
+    // While local CRUD is pending upload, SyncLocalDatabase must report
+    // Ready=false; once the server's response is synced back, the local row
+    // that the server did not echo is removed.
+    [Fact]
+    public async Task ShouldNotSyncLocalDbWithPendingCrud_ServerRemoved()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        // Local save
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]);
+
+        var insertedResult = await db.GetAll<IdResult>("SELECT id FROM assets WHERE id = 'O3'");
+        Assert.Equal(new IdResult("O3"), insertedResult[0]);
+
+        // At this point, we have data in the CRUD table and are not able to sync the local DB.
+        var result = await bucketStorage.SyncLocalDatabase(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        var expectedResult = new SyncLocalDatabaseResult
+        {
+            Ready = false,
+            CheckpointValid = true
+        };
+
+        Assert.Equal(expectedResult, result);
+
+        // Simulate a (contentless) upload of the pending CRUD batch.
+        var batch = await bucketStorage.GetCrudBatch();
+        if (batch != null)
+        {
+            await batch.Complete("");
+        }
+
+        await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4"));
+
+        // At this point, the data has been uploaded but not synced back yet.
+        var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        Assert.Equal(expectedResult, result3);
+
+        // The data must still be present locally.
+        var stillPresentResult = await db.GetAll<IdResult>("SELECT id FROM assets WHERE id = 'O3'");
+        Assert.Equal(new IdResult("O3"), stillPresentResult[0]);
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+            new SyncDataBucket("bucket1", Array.Empty<OplogEntry>(), false)
+            ])
+        );
+
+        // Now we have synced the data back (or lack of data in this case),
+        // so we can do a local sync.
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "5",
+            WriteCheckpoint = "5",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        // Since the object was not in the sync response, it is deleted.
+        var deletedResult = await db.GetAll<IdResult>("SELECT id FROM assets WHERE id = 'O3'");
+        Assert.Empty(deletedResult);
+    }
+
+    // New CRUD created after the upload but before SyncLocalDatabase must
+    // keep the local database in the not-ready state.
+    [Fact]
+    public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_1()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        // Local save
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]);
+
+        // Simulate uploading the pending CRUD batch.
+        var batch = await bucketStorage.GetCrudBatch();
+        if (batch != null)
+        {
+            await batch.Complete("");
+        }
+
+        await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4"));
+
+        var result3 = await bucketStorage.SyncLocalDatabase(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        var expectedResult = new SyncLocalDatabaseResult
+        {
+            Ready = false,
+            CheckpointValid = true
+        };
+
+        Assert.Equal(expectedResult, result3);
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", Array.Empty<OplogEntry>(), false)
+            ])
+        );
+
+        // Add more data before SyncLocalDatabase.
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]);
+
+        // The fresh CRUD entry keeps the database not-ready.
+        var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint
+        {
+            LastOpId = "5",
+            WriteCheckpoint = "5",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        Assert.Equal(expectedResult, result4);
+    }
+
+    // CRUD added between fetching a batch and completing it must also keep
+    // the local database in the not-ready state.
+    [Fact]
+    public async Task ShouldNotSyncLocalDbWithPendingCrud_WhenMoreCrudIsAdded_2()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+              new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        // Local save
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]);
+
+        var batch = await bucketStorage.GetCrudBatch();
+
+        // Add more data before calling complete()
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]);
+        if (batch != null)
+        {
+            await batch.Complete("");
+        }
+
+        await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4"));
+
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+            new SyncDataBucket("bucket1", [], false)
+            ])
+        );
+
+        // The O4 insert was never part of the completed batch, so pending
+        // CRUD remains and the sync cannot become ready.
+        var result4 = await bucketStorage.SyncLocalDatabase(new Checkpoint
+        {
+            LastOpId = "5",
+            WriteCheckpoint = "5",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        var expected = new SyncLocalDatabaseResult
+        {
+            Ready = false,
+            CheckpointValid = true
+        };
+
+        Assert.Equal(expected, result4);
+    }
+
+    // After uploading local CRUD, a server PUT for the same row syncs back
+    // and replaces the locally-created version.
+    [Fact]
+    public async Task ShouldNotSyncLocalDbWithPendingCrud_UpdateOnServer()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        // Local save
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]);
+
+        // Simulate uploading the pending CRUD batch.
+        var batch = await bucketStorage.GetCrudBatch();
+        if (batch != null)
+        {
+            await batch.Complete("");
+        }
+
+        await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4"));
+
+        // Server responds with its own version of O3.
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+            new SyncDataBucket("bucket1",
+            [
+                OplogEntry.FromRow(new OplogEntryJSON
+                {
+                    OpId = "5",
+                    Op = new OpType(OpTypeEnum.PUT).ToJSON(),
+                    ObjectType = "assets",
+                    ObjectId = "O3",
+                    Checksum = 5,
+                    Data = new { description = "server updated" }
+                })
+            ], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "5",
+            WriteCheckpoint = "5",
+            // 11 = 6 (previous ops) + 5 (server PUT of O3).
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 11 }
+        ]
+        });
+
+        var updatedResult = await db.GetAll<DescriptionResult>("SELECT description FROM assets WHERE id = 'O3'");
+        Assert.Equal(new DescriptionResult("server updated"), updatedResult[0]);
+    }
+
+    [Fact]
+    public async Task ShouldRevertAFailingInsert()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        // Local insert, later rejected by server
+        await db.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", ["O3", "inserted"]);
+
+        var batch = await bucketStorage.GetCrudBatch();
+        if (batch != null)
+        {
+            await batch.Complete("");
+        }
+
+        await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4"));
+
+        var insertedResult = await db.GetAll<DescriptionResult>("SELECT description FROM assets WHERE id = 'O3'");
+        Assert.Equal(new DescriptionResult("inserted"), insertedResult[0]);
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "4",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        var revertedResult = await db.GetAll<DescriptionResult>("SELECT description FROM assets WHERE id = 'O3'");
+        Assert.Empty(revertedResult);
+    }
+
+    [Fact]
+    public async Task ShouldRevertAFailingDelete()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        // Local delete, later rejected by server
+        await db.Execute("DELETE FROM assets WHERE id = ?", ["O2"]);
+
+        var deletedResult = await db.GetAll<DescriptionResult>("SELECT description FROM assets WHERE id = 'O2'");
+        Assert.Empty(deletedResult); // Ensure the record is deleted locally
+
+        // Simulate a permissions error when uploading - data should be preserved
+        var batch = await bucketStorage.GetCrudBatch();
+        if (batch != null)
+        {
+            await batch.Complete("");
+        }
+
+        await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4"));
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "4",
+            Buckets = [new BucketChecksum { Bucket = "bucket1", Checksum = 6 }]
+        });
+
+        var revertedResult = await db.GetAll<DescriptionResult>("SELECT description FROM assets WHERE id = 'O2'");
+        Assert.Equal(new DescriptionResult("bar"), revertedResult[0]);
+    }
+
+    [Fact]
+    public async Task ShouldRevertAFailingUpdate()
+    {
+        await bucketStorage.SaveSyncData(
+            new SyncDataBatch(
+            [
+                new SyncDataBucket("bucket1", [TestData.putAsset1_1, TestData.putAsset2_2, TestData.putAsset1_3], false)
+            ])
+        );
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "3",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        // Local update, later rejected by server
+        await db.Execute("UPDATE assets SET description = ? WHERE id = ?", ["updated", "O2"]);
+
+        var updatedResult = await db.GetAll<DescriptionResult>("SELECT description FROM assets WHERE id = 'O2'");
+        Assert.Equal(new DescriptionResult("updated"), updatedResult[0]);
+
+        // Simulate a permissions error when uploading - data should be preserved
+        var batch = await bucketStorage.GetCrudBatch();
+        if (batch != null)
+        {
+            await batch.Complete("");
+        }
+
+        await bucketStorage.UpdateLocalTarget(() => Task.FromResult("4"));
+
+        await SyncLocalChecked(new Checkpoint
+        {
+            LastOpId = "3",
+            WriteCheckpoint = "4",
+            Buckets =
+            [
+            new BucketChecksum { Bucket = "bucket1", Checksum = 6 }
+        ]
+        });
+
+        var revertedResult = await db.GetAll<DescriptionResult>("SELECT description FROM assets WHERE id = 'O2'");
+        Assert.Equal(new DescriptionResult("bar"), revertedResult[0]);
+    }
+}
\ No newline at end of file
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs
new file mode 100644
index 0000000..7a6dd99
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs
@@ -0,0 +1,114 @@
+namespace PowerSync.Common.Tests;
+
+using PowerSync.Common.Utils;
+using PowerSync.Common.DB.Crud;
+
+public class EventStreamTests
+{
+
+    [Fact]
+    public async Task EventStream_ShouldReceiveTwoMessages_Async()
+    {
+        var eventStream = new EventStream<SyncStatus>();
+
+        var cts = new CancellationTokenSource();
+        var receivedMessages = new List<SyncStatus>();
+
+        var completedTask = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
+        var listenerReadySource = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
+
+
+        var listenTask = Task.Run(async () =>
+        {
+            var stream = eventStream.ListenAsync(cts.Token);
+
+            listenerReadySource.TrySetResult(true);
+
+            await foreach (var status in stream)
+            {
+                receivedMessages.Add(status);
+
+                if (receivedMessages.Count == 2)
+                {
+                    cts.Cancel();
+                }
+            }
+            completedTask.SetResult(true);
+        });
+
+        await listenerReadySource.Task;
+        Assert.Equal(1, eventStream.SubscriberCount());
+
+        var status1 = new SyncStatus(new SyncStatusOptions
+        {
+            Connected = true,
+        });
+
+        var status2 = new SyncStatus(new SyncStatusOptions
+        {
+            Connected = false,
+        });
+
+        eventStream.Emit(status1);
+        eventStream.Emit(status2);
+
+
+        await completedTask.Task;
+
+        Assert.Equal(2, receivedMessages.Count);
+        Assert.Contains(status1, receivedMessages);
+        Assert.Contains(status2, receivedMessages);
+        Assert.Equal(0, eventStream.SubscriberCount());
+    }
+
+    [Fact]
+    public async Task EventStream_ShouldReceiveTwoMessages_Sync()
+    {
+        var eventStream = new EventStream<SyncStatus>();
+        var cts = new CancellationTokenSource();
+        var receivedMessages = new List<SyncStatus>();
+
+        var completedTask = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
+        var listenerReadySource = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
+
+        var listenTask = Task.Run(() =>
+        {
+            var stream = eventStream.Listen(cts.Token);
+
+            listenerReadySource.SetResult(true);
+
+            foreach (var status in stream)
+            {
+                receivedMessages.Add(status);
+                if (receivedMessages.Count == 2)
+                {
+                    cts.Cancel();
+                }
+            }
+            completedTask.SetResult(true);
+        });
+
+        await listenerReadySource.Task;
+        Assert.Equal(1, eventStream.SubscriberCount());
+
+        var status1 = new SyncStatus(new SyncStatusOptions
+        {
+            Connected = true,
+        });
+
+        var status2 = new SyncStatus(new SyncStatusOptions
+        {
+            Connected = false,
+        });
+
+        eventStream.Emit(status1);
+        eventStream.Emit(status2);
+
+        await completedTask.Task;
+
+        Assert.Equal(2, receivedMessages.Count);
+        Assert.Contains(status1, receivedMessages);
+        Assert.Contains(status2, receivedMessages);
+        Assert.Equal(0, eventStream.SubscriberCount());
+    }
+}
\ No newline at end of file
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj b/Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj
new file mode 100644
index 0000000..50f5bd9
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj
@@ -0,0 +1,34 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFrameworks>netstandard2.0;net6.0;net8.0</TargetFrameworks>
+    <LangVersion>12</LangVersion>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+
+    <IsPackable>false</IsPackable>
+    <IsTestProject>true</IsTestProject>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="coverlet.collector" Version="6.0.0" />
+    <PackageReference Include="Microsoft.Data.Sqlite" Version="9.0.1" />
+    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" />
+    <PackageReference Include="sqlite-net-pcl" Version="1.9.172" />
+    <PackageReference Include="SQLitePCLRaw.bundle_e_sqlite3" Version="2.1.10" />
+    <PackageReference Include="xunit" Version="2.5.3" />
+    <PackageReference Include="xunit.runner.visualstudio" Version="2.5.3">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+  </ItemGroup>
+
+  <ItemGroup>
+    <Using Include="Xunit" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\..\..\PowerSync\PowerSync.Common\PowerSync.Common.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs
new file mode 100644
index 0000000..bc9a81a
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs
@@ -0,0 +1,143 @@
+namespace PowerSync.Common.Tests;
+
+using PowerSync.Common.Client;
+using PowerSync.Common.Client.Connection;
+using PowerSync.Common.DB.Schema;
+using Newtonsoft.Json;
+
+// Some of this can be moved over to general tests/loading version?
+public class PowerSyncCredentialsTests
+{
+    private Schema AppSchema;
+    public PowerSyncCredentialsTests()
+    {
+        var users = new Table(new Dictionary<string, ColumnType>
+        {
+            { "name", ColumnType.TEXT },
+            { "age", ColumnType.INTEGER }
+        });
+
+        var posts = new Table(new Dictionary<string, ColumnType>
+        {
+            { "title", ColumnType.TEXT },
+            { "content", ColumnType.TEXT }
+        });
+
+        AppSchema = new Schema(new Dictionary<string, Table>
+        {
+            { "users", users },
+            { "posts", posts }
+        });
+    }
+
+    [Fact(Skip = "Skipping this test temporarily")]
+    public void SimpleTest()
+    {
+        var endpoint = "http://localhost";
+        var token = "token";
+        var expiresAt = new DateTime();
+        PowerSyncCredentials credentials = new PowerSyncCredentials(endpoint, token, expiresAt);
+        Assert.Equal(endpoint, credentials.Endpoint);
+        Assert.Equal(token, credentials.Token);
+        Assert.Equal(expiresAt, credentials.ExpiresAt);
+    }
+
+    [Fact(Skip = "Skipping this test temporarily")]
+    public async Task LoadVersion()
+    {
+        // var db = new MDSAdapter();
+        var db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = "x.db" },
+            Schema = AppSchema,
+        });
+        Console.WriteLine("Pre adapter" + db.SdkVersion);
+        await db.WaitForReady();
+        Console.WriteLine("Post adapter" + db.SdkVersion);
+
+        await db.Execute(@"CREATE TABLE Users (
+        Id INTEGER PRIMARY KEY AUTOINCREMENT,
+        Name TEXT NOT NULL
+        );");
+
+        await db.Execute(@"INSERT INTO Users (Name) VALUES ('Alice');");
+        await db.Execute(@"INSERT INTO Users (Name) VALUES ('Bob');");
+        await db.Execute(@"UPDATE USERS set Name = 'Wonderland' where Name = 'Alice';");
+
+        var x = await db.GetAll<object>("SELECT Name FROM Users limit 1;", []);
+
+        string json = JsonConvert.SerializeObject(x, Formatting.Indented);
+        Console.WriteLine("Result: " + json);
+        // var x = await db.Execute("SELECT powersync_rs_version() as version");
+        // Console.WriteLine(x.Rows.Array.First().First());
+
+        // var x = await db.Execute("SELECT powersync_rs_version() as version");
+        // using var connection = new SqliteConnection("Data Source=:memory:");
+        // var db = new MDSConnection(new MDSConnectionOptions(connection));
+        // connection.Open();
+
+        // string extensionPath = Path.Combine(Directory.GetCurrentDirectory(), "../../../libpowersync.dylib");
+
+        // connection.LoadExtension(extensionPath);
+
+        // var x = await db.Execute("SELECT powersync_rs_version() as version where 1 = 0;");
+        // var x = await db.Execute("SELECT * FROM Users WHERE 1 = 0;");
+
+
+        // Console.WriteLine(x.Rows.Array.First().First().Value);
+        // new AbstractPowerSyncDatabase();
+        // await Task.Delay(5000);
+    }
+
+    private record User(string Name, int Age);
+
+    [Fact]
+    public async Task SchemaTest()
+    {
+        var db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = "xxxx.db" },
+            Schema = AppSchema,
+        });
+        await db.DisconnectAndClear();
+        // const schema = new Schema({
+        //   users: new Table({
+        //     name: column.text,
+        //     age: { type: ColumnType.INTEGER }
+        //   }),
+        //   posts: new Table({
+        //     title: column.text,
+        //     content: column.text
+        //   })
+        // });
+
+
+        // var x = await db.GetAll<object>("SELECT name, sql FROM sqlite_master WHERE type='table' ORDER BY name;");
+        // string json = JsonConvert.SerializeObject(x, Formatting.Indented);
+        // Console.WriteLine("Result: " + json);
+        await db.Execute(@"INSERT INTO users (id, name, age) VALUES ('1','Alice', 20);");
+
+        var b = await db.GetAll<object>("SELECT * from users");
+        string jsona = JsonConvert.SerializeObject(b, Formatting.Indented);
+
+        Console.WriteLine("Result xxx: " + jsona);
+        // Console.WriteLine("Result xxx: " + (User)b[0]);
+
+        // var c = await db.Execute("PRAGMA table_info(users);");
+        // string jsonb = JsonConvert.SerializeObject(c.Rows.Array, Formatting.Indented);
+
+        // var k = await db.Database.ReadTransaction(async (tx) =>
+        // {
+        //     Console.WriteLine("reee");
+
+        //     return await tx.Execute("select * from users limit 1");
+        // });
+        // string jsonb = JsonConvert.SerializeObject(k.Rows.Array, Formatting.Indented);
+
+        // Console.WriteLine(jsonb);
+        // 
+
+        // Console.WriteLine(AppSchema.ToJson());
+    }
+
+}
\ No newline at end of file
diff --git a/Tools/Setup/Setup.cs b/Tools/Setup/Setup.cs
new file mode 100644
index 0000000..3cdf9e0
--- /dev/null
+++ b/Tools/Setup/Setup.cs
@@ -0,0 +1,130 @@
+using System;
+using System.IO;
+using System.Net.Http;
+using System.Runtime.InteropServices;
+using System.Threading.Tasks;
+
+public class Setup
+{
+    static async Task Main(string[] args)
+    {
+        const string baseUrl = "https://github.com/powersync-ja/powersync-sqlite-core/releases/download/v0.3.8";
+        const string powersyncCorePath = "../../PowerSync/PowerSync.Common";
+
+        string rid = GetRuntimeIdentifier();
+        string nativeDir = Path.Combine(powersyncCorePath, "runtimes", rid, "native");
+
+        Directory.CreateDirectory(nativeDir);
+
+        string sqliteCoreFilename = GetLibraryForPlatform();
+        string sqliteCorePath = Path.Combine(nativeDir, sqliteCoreFilename);
+
+        try
+        {
+            await DownloadFile($"{baseUrl}/{sqliteCoreFilename}", sqliteCorePath);
+
+            string newFileName = GetFileNameForPlatform();
+            string newFilePath = Path.Combine(nativeDir, newFileName);
+
+            if (File.Exists(sqliteCorePath))
+            {
+                File.Move(sqliteCorePath, newFilePath, overwrite: true);
+                Console.WriteLine($"File renamed successfully from {sqliteCoreFilename} to {newFileName}");
+            }
+            else
+            {
+                throw new IOException($"File {sqliteCoreFilename} does not exist.");
+            }
+        }
+        catch (Exception ex)
+        {
+            Console.Error.WriteLine($"Error: {ex.Message}");
+            Environment.Exit(1);
+        }
+    }
+
+    static string GetRuntimeIdentifier()
+    {
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
+        {
+            if (RuntimeInformation.ProcessArchitecture == Architecture.Arm64)
+                return "osx-arm64";
+            else
+                return "osx-x64";
+        }
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
+        {
+            if (RuntimeInformation.ProcessArchitecture == Architecture.Arm64)
+                return "linux-arm64";
+            else
+                return "linux-x64";
+        }
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+        {
+            return "win-x64";
+        }
+        throw new PlatformNotSupportedException("Unsupported platform.");
+    }
+
+    static string GetFileNameForPlatform()
+    {
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
+        {
+            return "libpowersync.dylib";
+        }
+        else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
+        {
+            return "libpowersync.so";
+        }
+        else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+        {
+            return "powersync.dll";
+        }
+        else
+        {
+            throw new PlatformNotSupportedException("Unsupported platform.");
+        }
+    }
+
+    static string GetLibraryForPlatform()
+    {
+        if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
+        {
+            return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
+                ? "libpowersync_aarch64.dylib"
+                : "libpowersync_x64.dylib";
+        }
+        else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
+        {
+            return RuntimeInformation.ProcessArchitecture == Architecture.Arm64
+                ? "libpowersync_aarch64.so"
+                : "libpowersync_x64.so";
+        }
+        else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+        {
+            return "powersync_x64.dll";
+        }
+        else
+        {
+            throw new PlatformNotSupportedException("Unsupported platform.");
+        }
+    }
+
+    static async Task DownloadFile(string url, string outputPath)
+    {
+        Console.WriteLine($"Downloading: {url}");
+
+        using var httpClient = new HttpClient();
+        using var response = await httpClient.GetAsync(url);
+
+        if (!response.IsSuccessStatusCode)
+        {
+            throw new HttpRequestException($"Failed to download file: {response.StatusCode} {response.ReasonPhrase}");
+        }
+
+        await using var fileStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write, FileShare.None);
+        await response.Content.CopyToAsync(fileStream);
+
+        Console.WriteLine($"Downloaded to {outputPath}");
+    }
+}
\ No newline at end of file
diff --git a/Tools/Setup/Setup.csproj b/Tools/Setup/Setup.csproj
new file mode 100644
index 0000000..83831b3
--- /dev/null
+++ b/Tools/Setup/Setup.csproj
@@ -0,0 +1,7 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+  </PropertyGroup>
+</Project>
\ No newline at end of file
diff --git a/demos/Command-Line/CLI/AppSchema.cs b/demos/Command-Line/CLI/AppSchema.cs
new file mode 100644
index 0000000..726cb8f
--- /dev/null
+++ b/demos/Command-Line/CLI/AppSchema.cs
@@ -0,0 +1,33 @@
+namespace CLI;
+
+using PowerSync.Common.DB.Schema;
+
+class AppSchema
+{
+    public static Table Todos = new Table(new Dictionary<string, ColumnType>
+    {
+        { "list_id", ColumnType.TEXT },
+        { "created_at", ColumnType.TEXT },
+        { "completed_at", ColumnType.TEXT },
+        { "description", ColumnType.TEXT },
+        { "created_by", ColumnType.TEXT },
+        { "completed_by", ColumnType.TEXT },
+        { "completed", ColumnType.INTEGER }
+    }, new TableOptions
+    {
+        Indexes = new Dictionary<string, List<string>> { { "list", new List<string> { "list_id" } } }
+    });
+
+    public static Table Lists = new Table(new Dictionary<string, ColumnType>
+    {
+        { "created_at", ColumnType.TEXT },
+        { "name", ColumnType.TEXT },
+        { "owner_id", ColumnType.TEXT }
+    });
+
+    public static Schema PowerSyncSchema = new Schema(new Dictionary<string, Table>
+    {
+        { "todos", Todos },
+        { "lists", Lists }
+    });
+}
\ No newline at end of file
diff --git a/demos/Command-Line/CLI/CLI.csproj b/demos/Command-Line/CLI/CLI.csproj
new file mode 100644
index 0000000..1ea25bb
--- /dev/null
+++ b/demos/Command-Line/CLI/CLI.csproj
@@ -0,0 +1,25 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <LangVersion>12</LangVersion>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+
+
+    <IsPackable>false</IsPackable>
+    <IsTestProject>false</IsTestProject>
+  </PropertyGroup>
+
+   <ItemGroup>
+    <PackageReference Include="Microsoft.Data.Sqlite" Version="9.0.1" />
+    <PackageReference Include="Spectre.Console" Version="0.49.1" />
+
+  </ItemGroup>
+
+
+  <ItemGroup>
+    <ProjectReference Include="..\..\..\PowerSync\PowerSync.Common\PowerSync.Common.csproj" />
+  </ItemGroup>
+</Project>
diff --git a/demos/Command-Line/CLI/Demo.cs b/demos/Command-Line/CLI/Demo.cs
new file mode 100644
index 0000000..2da29cc
--- /dev/null
+++ b/demos/Command-Line/CLI/Demo.cs
@@ -0,0 +1,106 @@
+using CLI;
+using PowerSync.Common.Client;
+using Spectre.Console;
+
+class Demo
+{
+
+    private record ListResult(string id, string name, string owner_id, string created_at);
+    static async Task Main()
+    {
+        var db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = "cli-example.db" },
+            Schema = AppSchema.PowerSyncSchema,
+        });
+        await db.Init();
+
+        var connector = new NodeConnector();
+
+        var table = new Table()
+            .AddColumn("id")
+            .AddColumn("name")
+            .AddColumn("owner_id")
+            .AddColumn("created_at");
+
+        Console.WriteLine("Press ESC to exit.");
+        Console.WriteLine("Press Enter to add a new row.");
+        Console.WriteLine("Press Backspace to delete the last row.");
+        Console.WriteLine("");
+
+        bool running = true;
+
+        db.Watch("select * from lists", null, new WatchHandler<ListResult>
+        {
+            OnResult = (results) =>
+            {
+                table.Rows.Clear();
+                foreach (var line in results)
+                {
+                    table.AddRow(line.id, line.name, line.owner_id, line.created_at);
+                }
+            },
+            OnError = (error) =>
+            {
+                Console.WriteLine("Error: " + error.Message);
+            }
+        });
+
+        var _ = Task.Run(async () =>
+         {
+             while (running)
+             {
+                 if (Console.KeyAvailable)
+                 {
+                     var key = Console.ReadKey(intercept: true);
+                     if (key.Key == ConsoleKey.Escape)
+                     {
+                         running = false;
+                     }
+                     else if (key.Key == ConsoleKey.Enter)
+                     {
+                         await db.Execute("insert into lists (id, name, owner_id, created_at) values (uuid(), 'New User', ?, datetime())", [connector.UserId]);
+                     }
+                     else if (key.Key == ConsoleKey.Backspace)
+                     {
+                         await db.Execute("delete from lists where id = (select id from lists order by created_at desc limit 1)");
+                     }
+                 }
+                 await Task.Delay(100);
+             }
+         });
+
+        await db.Connect(connector);
+        await db.WaitForFirstSync();
+
+        var panel = new Panel(table)
+        {
+            Header = new PanelHeader("")
+        };
+        var connected = false;
+
+        db.RunListener((update) =>
+        {
+            if (update.StatusChanged != null)
+            {
+                connected = update.StatusChanged.Connected;
+            }
+        });
+
+
+        // Start live updating table
+        await AnsiConsole.Live(panel)
+            .StartAsync(async ctx =>
+            {
+                while (running)
+                {
+                    panel.Header = new PanelHeader($"|    Connected: {connected}    |");
+                    await Task.Delay(1000);
+                    ctx.Refresh();
+
+                }
+            });
+
+        Console.WriteLine("\nExited live table. Press any key to exit.");
+    }
+}
\ No newline at end of file
diff --git a/demos/Command-Line/CLI/NodeConnector.cs b/demos/Command-Line/CLI/NodeConnector.cs
new file mode 100644
index 0000000..354d3b8
--- /dev/null
+++ b/demos/Command-Line/CLI/NodeConnector.cs
@@ -0,0 +1,125 @@
+namespace CLI;
+
+using System;
+using System.Collections.Generic;
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+using System.Threading.Tasks;
+using System.IO;
+using PowerSync.Common.Client;
+using PowerSync.Common.Client.Connection;
+using PowerSync.Common.DB.Crud;
+
+
+public class NodeConnector : IPowerSyncBackendConnector
+{
+    private static readonly string StorageFilePath = "user_id.txt"; // Simulating local storage
+    private readonly HttpClient _httpClient;
+
+    public string BackendUrl { get; }
+    public string PowerSyncUrl { get; }
+    public string UserId { get; private set; }
+    private string? clientId;
+
+    public NodeConnector()
+    {
+        _httpClient = new HttpClient();
+
+        // Load or generate User ID
+        UserId = LoadOrGenerateUserId();
+
+        BackendUrl = "http://localhost:6060";
+        PowerSyncUrl = "http://localhost:8080";
+
+        clientId = null;
+    }
+
+    public string LoadOrGenerateUserId()
+    {
+        if (File.Exists(StorageFilePath))
+        {
+            return File.ReadAllText(StorageFilePath);
+        }
+
+        string newUserId = Guid.NewGuid().ToString();
+        File.WriteAllText(StorageFilePath, newUserId);
+        return newUserId;
+    }
+
+    public async Task<PowerSyncCredentials?> FetchCredentials()
+    {
+        string tokenEndpoint = "api/auth/token";
+        string url = $"{BackendUrl}/{tokenEndpoint}?user_id={UserId}";
+
+        HttpResponseMessage response = await _httpClient.GetAsync(url);
+        if (!response.IsSuccessStatusCode)
+        {
+            throw new Exception($"Received {response.StatusCode} from {tokenEndpoint}: {await response.Content.ReadAsStringAsync()}");
+        }
+
+        string responseBody = await response.Content.ReadAsStringAsync();
+        var jsonResponse = JsonSerializer.Deserialize<Dictionary<string, string>>(responseBody);
+
+        if (jsonResponse == null || !jsonResponse.TryGetValue("token", out var token))
+        {
+            throw new Exception("Invalid response received from authentication endpoint.");
+        }
+
+        return new PowerSyncCredentials(PowerSyncUrl, token);
+    }
+
+    public async Task UploadData(IPowerSyncDatabase database)
+    {
+        CrudTransaction? transaction;
+        try
+        {
+            transaction = await database.GetNextCrudTransaction();
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"UploadData Error: {ex.Message}");
+            return;
+        }
+
+        if (transaction == null)
+        {
+            return;
+        }
+
+        clientId ??= await database.GetClientId();
+
+        try
+        {
+            var batch = new List<object>();
+
+            foreach (var operation in transaction.Crud)
+            {
+                batch.Add(new
+                {
+                    op = operation.Op.ToString(),
+                    table = operation.Table,
+                    id = operation.Id,
+                    data = operation.OpData
+                });
+            }
+
+            var payload = JsonSerializer.Serialize(new { batch });
+            var content = new StringContent(payload, Encoding.UTF8, "application/json");
+
+            HttpResponseMessage response = await _httpClient.PostAsync($"{BackendUrl}/api/data", content);
+
+            if (!response.IsSuccessStatusCode)
+            {
+                throw new Exception($"Received {response.StatusCode} from /api/data: {await response.Content.ReadAsStringAsync()}");
+            }
+
+            await transaction.Complete();
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"UploadData Error: {ex.Message}");
+            throw;
+        }
+    }
+}
diff --git a/demos/Command-Line/CLI/README.md b/demos/Command-Line/CLI/README.md
new file mode 100644
index 0000000..3f527dd
--- /dev/null
+++ b/demos/Command-Line/CLI/README.md
@@ -0,0 +1,24 @@
+# PowerSync CLI demo app
+
+This demo features a CLI-based table view that stays *live* using a *watch query*, ensuring the data updates in real time as changes occur.
+To run this demo, you need to have the [Node.js self-host demo](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs) running, as it provides the PowerSync server that this CLI's PowerSync SDK connects to.
+
+Changes made to the backend's source DB or to the self-hosted web UI will be synced to this CLI client (and vice versa).
+
+## Authentication
+
+This essentially uses anonymous authentication. A random user ID is generated and stored in local storage. The backend returns a valid token which is not linked to a specific user. All data is synced to all users.
+
+## Getting Started
+
+Install dependencies
+
+```bash
+dotnet restore
+```
+
+To run the CLI
+
+```bash
+dotnet run Demo
+```
\ No newline at end of file
diff --git a/demos/Command-Line/CLI/user_id.txt b/demos/Command-Line/CLI/user_id.txt
new file mode 100644
index 0000000..944c084
--- /dev/null
+++ b/demos/Command-Line/CLI/user_id.txt
@@ -0,0 +1 @@
+eb6b96fc-6f45-4b36-ac92-0f47fae35838
\ No newline at end of file
diff --git a/root.sln b/root.sln
new file mode 100644
index 0000000..6800da2
--- /dev/null
+++ b/root.sln
@@ -0,0 +1,51 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31903.59
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PowerSync", "PowerSync", "{B1D87BA9-8812-4EFA-BBBE-1FF1EEEB5433}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PowerSync.Common", "PowerSync\PowerSync.Common\PowerSync.Common.csproj", "{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "demos", "demos", "{6A2C8920-FA72-44E3-AB08-4FC8B2655A30}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Command-Line", "Command-Line", "{461A9BB1-D773-451F-BD3E-4765A332A782}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CLI", "demos\Command-Line\CLI\CLI.csproj", "{42BC9844-F934-4C26-9354-0B78BF770DD2}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{7404B997-119F-4826-845A-18023CC9F002}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PowerSync", "PowerSync", "{C784FBE4-CC1E-4A0A-AE8E-6B818DD3724D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PowerSync.Common.Tests", "Tests\PowerSync\PowerSync.Common.Tests\PowerSync.Common.Tests.csproj", "{11102D8A-1B2C-4299-8289-88909F132A56}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Any CPU = Debug|Any CPU
+		Release|Any CPU = Release|Any CPU
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}.Release|Any CPU.Build.0 = Release|Any CPU
+		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Release|Any CPU.Build.0 = Release|Any CPU
+		{11102D8A-1B2C-4299-8289-88909F132A56}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{11102D8A-1B2C-4299-8289-88909F132A56}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{11102D8A-1B2C-4299-8289-88909F132A56}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{11102D8A-1B2C-4299-8289-88909F132A56}.Release|Any CPU.Build.0 = Release|Any CPU
+	EndGlobalSection
+	GlobalSection(NestedProjects) = preSolution
+		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393} = {B1D87BA9-8812-4EFA-BBBE-1FF1EEEB5433}
+		{461A9BB1-D773-451F-BD3E-4765A332A782} = {6A2C8920-FA72-44E3-AB08-4FC8B2655A30}
+		{42BC9844-F934-4C26-9354-0B78BF770DD2} = {461A9BB1-D773-451F-BD3E-4765A332A782}
+		{C784FBE4-CC1E-4A0A-AE8E-6B818DD3724D} = {7404B997-119F-4826-845A-18023CC9F002}
+		{11102D8A-1B2C-4299-8289-88909F132A56} = {C784FBE4-CC1E-4A0A-AE8E-6B818DD3724D}
+	EndGlobalSection
+EndGlobal

From dbe113aad5860f4be62d6eb241ff8c72e7230385 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Thu, 6 Mar 2025 11:02:59 +0200
Subject: [PATCH 02/26] Using base directory for setup script.

---
 Tools/Setup/Setup.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Tools/Setup/Setup.cs b/Tools/Setup/Setup.cs
index 3cdf9e0..6638217 100644
--- a/Tools/Setup/Setup.cs
+++ b/Tools/Setup/Setup.cs
@@ -9,7 +9,7 @@ public class Setup
     static async Task Main(string[] args)
     {
         const string baseUrl = "https://github.com/powersync-ja/powersync-sqlite-core/releases/download/v0.3.8";
-        const string powersyncCorePath = "../../PowerSync/PowerSync.Common";
+        string powersyncCorePath = Path.Combine(AppContext.BaseDirectory, "../../../../..", "PowerSync/PowerSync.Common/");
 
         string rid = GetRuntimeIdentifier();
         string nativeDir = Path.Combine(powersyncCorePath, "runtimes", rid, "native");

From 41e5d376f9e0026619aba0256f3bf55cd360b614 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Thu, 6 Mar 2025 11:16:35 +0200
Subject: [PATCH 03/26] Added readme entry for powersync extension retrieval.

---
 README.md                 |   5 +++++
 Tests/PowerSync/.DS_Store | Bin 6148 -> 0 bytes
 2 files changed, 5 insertions(+)
 delete mode 100644 Tests/PowerSync/.DS_Store

diff --git a/README.md b/README.md
index ac6e360..80bad4a 100644
--- a/README.md
+++ b/README.md
@@ -35,6 +35,11 @@ When running commands such as `dotnet run` or `dotnet test`, you may need to spe
 
 # Development
 
+Download PowerSync extension
+```bash
+dotnet run --project Tools/Setup    
+```
+
 Install dependencies
 
 ```bash
diff --git a/Tests/PowerSync/.DS_Store b/Tests/PowerSync/.DS_Store
deleted file mode 100644
index 58bdd5a41c80dc73d0872ac081b3828c53c50d9a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 6148
zcmeHKJ5Iw;5S)b+k)TLP`A&eu4W2?cK{x;fhz1;q5NcPPD@V)hM~KLwprAps*6zHw
z-g%a%@OlB*a`$)xEC9^uj`;L2HNWpZv9rn;k<K$V7;ueyJYhSjKAmvxC2q-l<@KHa
zj_t?#ZFt!X&w2jo1C*5lQa}nw0VyB_ex-o-UfTR3QBev=0V(jMfPWtv-LV&riSg-R
zh!KD|V>*oMm?enK6U1IPCNe{_q!N>AHDXxOnQv9s3&+Hy!)o}jda~7oV)1mI-=ZAW
z6BVU^6gXDkJhuz)|9A8s=Ko`oc2YnJ{3``)wpy>2e5LBGlb7>e+vpE;ui4YxxDE<K
mv}0nlV{W`1Uqw;YHDB|7FB}tt&V0~``WbLtWK!U-6*vIhE)~@P


From e5c6fbc2caa0b1037203205b1d451300afba8562 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Thu, 6 Mar 2025 11:25:48 +0200
Subject: [PATCH 04/26] Added .DS_Store entry to .gitignore.

---
 .gitignore | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.gitignore b/.gitignore
index 313bbdc..9d00637 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,6 +38,9 @@ obj/
 *.lastbuildstate
 *.idb
 
+.DS_Store
+
+
 # NuGet packages
 *.nupkg
 *.snupkg

From 45371d9babfc2af8063a0a702a331cd4c9168e30 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Thu, 6 Mar 2025 11:41:39 +0200
Subject: [PATCH 05/26] Moved CLI demo to CommandLine dir.

---
 README.md                                     |  3 ++-
 .../CLI => CommandLine}/AppSchema.cs          |  2 +-
 .../CommandLine.csproj}                       |  2 +-
 .../{Command-Line/CLI => CommandLine}/Demo.cs |  3 ++-
 .../CLI => CommandLine}/NodeConnector.cs      |  2 +-
 .../CLI => CommandLine}/README.md             |  2 +-
 .../CLI => CommandLine}/user_id.txt           |  0
 root.sln                                      | 21 ++++++++-----------
 8 files changed, 17 insertions(+), 18 deletions(-)
 rename demos/{Command-Line/CLI => CommandLine}/AppSchema.cs (97%)
 rename demos/{Command-Line/CLI/CLI.csproj => CommandLine/CommandLine.csproj} (82%)
 rename demos/{Command-Line/CLI => CommandLine}/Demo.cs (95%)
 rename demos/{Command-Line/CLI => CommandLine}/NodeConnector.cs (96%)
 rename demos/{Command-Line/CLI => CommandLine}/README.md (96%)
 rename demos/{Command-Line/CLI => CommandLine}/user_id.txt (100%)

diff --git a/README.md b/README.md
index 80bad4a..fb3e6b4 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ Demo applications are located in the [`demos/`](./demos/) directory. Also see ou
 
 ### Command-Line
 
-- [demos/Command-Line/CLI](./demos/Command-Line/CLI/README.md): A CLI to-do list example app using a Node-js backend.
+- [demos/CommandLine](./demos/CommandLine/README.md): A CLI to-do list example app using a Node-js backend.
 
 # Supported Frameworks
 
@@ -36,6 +36,7 @@ When running commands such as `dotnet run` or `dotnet test`, you may need to spe
 # Development
 
 Download PowerSync extension
+
 ```bash
 dotnet run --project Tools/Setup    
 ```
diff --git a/demos/Command-Line/CLI/AppSchema.cs b/demos/CommandLine/AppSchema.cs
similarity index 97%
rename from demos/Command-Line/CLI/AppSchema.cs
rename to demos/CommandLine/AppSchema.cs
index 726cb8f..3b788cd 100644
--- a/demos/Command-Line/CLI/AppSchema.cs
+++ b/demos/CommandLine/AppSchema.cs
@@ -1,4 +1,4 @@
-namespace CLI;
+namespace CommandLine;
 
 using PowerSync.Common.DB.Schema;
 
diff --git a/demos/Command-Line/CLI/CLI.csproj b/demos/CommandLine/CommandLine.csproj
similarity index 82%
rename from demos/Command-Line/CLI/CLI.csproj
rename to demos/CommandLine/CommandLine.csproj
index 1ea25bb..f33a0ab 100644
--- a/demos/Command-Line/CLI/CLI.csproj
+++ b/demos/CommandLine/CommandLine.csproj
@@ -20,6 +20,6 @@
 
 
   <ItemGroup>
-    <ProjectReference Include="..\..\..\PowerSync\Powersync.Common\PowerSync.Common.csproj" />
+    <ProjectReference Include="..\..\PowerSync\PowerSync.Common\PowerSync.Common.csproj" />
   </ItemGroup>
 </Project>
diff --git a/demos/Command-Line/CLI/Demo.cs b/demos/CommandLine/Demo.cs
similarity index 95%
rename from demos/Command-Line/CLI/Demo.cs
rename to demos/CommandLine/Demo.cs
index 2da29cc..35c00ae 100644
--- a/demos/Command-Line/CLI/Demo.cs
+++ b/demos/CommandLine/Demo.cs
@@ -1,4 +1,5 @@
-using CLI;
+namespace CommandLine;
+
 using PowerSync.Common.Client;
 using Spectre.Console;
 
diff --git a/demos/Command-Line/CLI/NodeConnector.cs b/demos/CommandLine/NodeConnector.cs
similarity index 96%
rename from demos/Command-Line/CLI/NodeConnector.cs
rename to demos/CommandLine/NodeConnector.cs
index 354d3b8..b7c5dfc 100644
--- a/demos/Command-Line/CLI/NodeConnector.cs
+++ b/demos/CommandLine/NodeConnector.cs
@@ -1,4 +1,4 @@
-namespace CLI;
+namespace CommandLine;
 
 using System;
 using System.Collections.Generic;
diff --git a/demos/Command-Line/CLI/README.md b/demos/CommandLine/README.md
similarity index 96%
rename from demos/Command-Line/CLI/README.md
rename to demos/CommandLine/README.md
index 3f527dd..8a7b5f4 100644
--- a/demos/Command-Line/CLI/README.md
+++ b/demos/CommandLine/README.md
@@ -17,7 +17,7 @@ Install dependencies
 dotnet restore
 ```
 
-To run the CLI
+To run the Command-Line interface
 
 ```bash
 dotnet run Demo
diff --git a/demos/Command-Line/CLI/user_id.txt b/demos/CommandLine/user_id.txt
similarity index 100%
rename from demos/Command-Line/CLI/user_id.txt
rename to demos/CommandLine/user_id.txt
diff --git a/root.sln b/root.sln
index 6800da2..b91bdc3 100644
--- a/root.sln
+++ b/root.sln
@@ -7,18 +7,16 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PowerSync", "PowerSync", "{
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PowerSync.Common", "PowerSync\PowerSync.Common\PowerSync.Common.csproj", "{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}"
 EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "demos", "demos", "{6A2C8920-FA72-44E3-AB08-4FC8B2655A30}"
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Command-Line", "Command-Line", "{461A9BB1-D773-451F-BD3E-4765A332A782}"
-EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CLI", "demos\Command-Line\CLI\CLI.csproj", "{42BC9844-F934-4C26-9354-0B78BF770DD2}"
-EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{7404B997-119F-4826-845A-18023CC9F002}"
 EndProject
 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PowerSync", "PowerSync", "{C784FBE4-CC1E-4A0A-AE8E-6B818DD3724D}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PowerSync.Common.Tests", "Tests\PowerSync\PowerSync.Common.Tests\PowerSync.Common.Tests.csproj", "{11102D8A-1B2C-4299-8289-88909F132A56}"
 EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "demos", "demos", "{9144195A-C68F-4B1E-A574-474EDD424D6C}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CommandLine", "demos\CommandLine\CommandLine.csproj", "{D7FDA714-D29F-4D85-B3F2-74C6810A36F9}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug|Any CPU = Debug|Any CPU
@@ -32,20 +30,19 @@ Global
 		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393}.Release|Any CPU.Build.0 = Release|Any CPU
-		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
-		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Debug|Any CPU.Build.0 = Debug|Any CPU
-		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Release|Any CPU.ActiveCfg = Release|Any CPU
-		{42BC9844-F934-4C26-9354-0B78BF770DD2}.Release|Any CPU.Build.0 = Release|Any CPU
 		{11102D8A-1B2C-4299-8289-88909F132A56}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
 		{11102D8A-1B2C-4299-8289-88909F132A56}.Debug|Any CPU.Build.0 = Debug|Any CPU
 		{11102D8A-1B2C-4299-8289-88909F132A56}.Release|Any CPU.ActiveCfg = Release|Any CPU
 		{11102D8A-1B2C-4299-8289-88909F132A56}.Release|Any CPU.Build.0 = Release|Any CPU
+		{D7FDA714-D29F-4D85-B3F2-74C6810A36F9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{D7FDA714-D29F-4D85-B3F2-74C6810A36F9}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{D7FDA714-D29F-4D85-B3F2-74C6810A36F9}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{D7FDA714-D29F-4D85-B3F2-74C6810A36F9}.Release|Any CPU.Build.0 = Release|Any CPU
 	EndGlobalSection
 	GlobalSection(NestedProjects) = preSolution
 		{EA0A66FF-9E8A-4465-84EC-B2CFFC7E4393} = {B1D87BA9-8812-4EFA-BBBE-1FF1EEEB5433}
-		{461A9BB1-D773-451F-BD3E-4765A332A782} = {6A2C8920-FA72-44E3-AB08-4FC8B2655A30}
-		{42BC9844-F934-4C26-9354-0B78BF770DD2} = {461A9BB1-D773-451F-BD3E-4765A332A782}
 		{C784FBE4-CC1E-4A0A-AE8E-6B818DD3724D} = {7404B997-119F-4826-845A-18023CC9F002}
 		{11102D8A-1B2C-4299-8289-88909F132A56} = {C784FBE4-CC1E-4A0A-AE8E-6B818DD3724D}
+		{D7FDA714-D29F-4D85-B3F2-74C6810A36F9} = {9144195A-C68F-4B1E-A574-474EDD424D6C}
 	EndGlobalSection
 EndGlobal

From 521496522891cb5f658094bc00fedc629d395472 Mon Sep 17 00:00:00 2001
From: benitav <benita@journeyapps.com>
Date: Thu, 6 Mar 2025 17:09:35 +0200
Subject: [PATCH 06/26] Demo Readme tweak

---
 demos/CommandLine/README.md | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/demos/CommandLine/README.md b/demos/CommandLine/README.md
index 8a7b5f4..901f1c9 100644
--- a/demos/CommandLine/README.md
+++ b/demos/CommandLine/README.md
@@ -11,13 +11,21 @@ This essentially uses anonymous authentication. A random user ID is generated an
 
 ## Getting Started
 
-Install dependencies
+In the repo root, run the following to download the PowerSync extension:
+
+```bash
+dotnet run --project Tools/Setup    
+```
+
+Then switch into the demo's directory (`demos/CommandLine`).
+
+Install dependencies:
 
 ```bash
 dotnet restore
 ```
 
-To run the Command-Line interface
+To run the Command-Line interface:
 
 ```bash
 dotnet run Demo

From 819425bf70ca1c5eedfa14cebf87473f1e75bb55 Mon Sep 17 00:00:00 2001
From: benitav <benita@journeyapps.com>
Date: Thu, 6 Mar 2025 17:13:00 +0200
Subject: [PATCH 07/26] self-hosted

---
 demos/CommandLine/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/demos/CommandLine/README.md b/demos/CommandLine/README.md
index 901f1c9..8e90319 100644
--- a/demos/CommandLine/README.md
+++ b/demos/CommandLine/README.md
@@ -3,7 +3,7 @@
 This demo features a CLI-based table view that stays *live* using a *watch query*, ensuring the data updates in real time as changes occur.
 To run this demo, you need to have the [Node.js self-host demo](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs) running, as it provides the PowerSync server that this CLI's PowerSync SDK connects to.
 
-Changes made to the backend's source DB or to the selfhosted web UI will be synced to this CLI client (and vice versa).
+Changes made to the backend's source DB or to the self-hosted web UI will be synced to this CLI client (and vice versa).
 
 ## Authentication
 

From 3899f0d271d5682205fe5319d2cc44d5ce988e43 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Fri, 7 Mar 2025 12:01:43 +0200
Subject: [PATCH 08/26] Added .net9 to target frameworks for the package and
 tests. Added setup guide for .NET Framework 4.8

---
 .../PowerSync.Common/PowerSync.Common.csproj  |  2 +-
 README.md                                     | 32 +++++++++++++++----
 .../PowerSync.Common.Tests.csproj             |  2 +-
 3 files changed, 28 insertions(+), 8 deletions(-)

diff --git a/PowerSync/PowerSync.Common/PowerSync.Common.csproj b/PowerSync/PowerSync.Common/PowerSync.Common.csproj
index 3c85f7e..28610d8 100644
--- a/PowerSync/PowerSync.Common/PowerSync.Common.csproj
+++ b/PowerSync/PowerSync.Common/PowerSync.Common.csproj
@@ -1,7 +1,7 @@
 <Project Sdk="Microsoft.NET.Sdk">
 
   <PropertyGroup>
-    <TargetFrameworks>netstandard2.0;net6.0;net8.0</TargetFrameworks>
+    <TargetFrameworks>netstandard2.0;net6.0;net8.0;net9.0</TargetFrameworks>
     <LangVersion>12</LangVersion>
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>
diff --git a/README.md b/README.md
index fb3e6b4..bd2fa45 100644
--- a/README.md
+++ b/README.md
@@ -24,12 +24,32 @@ Demo applications are located in the [`demos/`](./demos/) directory. Also see ou
 
 # Supported Frameworks
 
-This monorepo currently targets the following .NET versions:
--	**.NET 8** ([primary target](https://dotnet.microsoft.com/en-us/download/dotnet/8.0), recommended for all new projects)
-- **.NET 6** (supported for compatibility with older projects)
--	**.NET Standard 2.0** (for compatibility with older libraries and frameworks)
-
-We are also actively working towards adding support for **.NET Framework 4.8** to enable compatibility with legacy applications.
+This PowerSync SDK currently targets the following .NET versions:
+- **.NET 9** - [Latest version](https://dotnet.microsoft.com/en-us/download/dotnet/9.0)
+-	**.NET 8** - [Current LTS Version, used for development of this project](https://dotnet.microsoft.com/en-us/download/dotnet/8.0)
+- **.NET 6** - supported for compatibility with older projects
+-	**.NET Standard 2.0** - for compatibility with older libraries and frameworks, tested/verified older versions will be listed below.
+
+- .NET Framework 4.8:
+    
+    To get a .NET Framework 4.8 working with this SDK add the following to your `.csproj` file:
+
+    ```xml
+    <PropertyGroup>
+      ...
+      <!-- Ensures the correct SQLite DLL is available -->
+      <RuntimeIdentifiers>win-x86;win-x64</RuntimeIdentifiers>
+      <RuntimeIdentifier>win-x64</RuntimeIdentifier>
+    </PropertyGroup>
+
+    <ItemGroup>
+      ...
+      <!-- Ensures the HTTP client resolves in the SDK -->
+      <PackageReference Include="System.Net.Http" Version="4.3.4" /> 
+    </ItemGroup>
+    ```
+    
+------- 
 
 When running commands such as `dotnet run` or `dotnet test`, you may need to specify the target framework explicitly using the `--framework` flag.
 
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj b/Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj
index 50f5bd9..1113c39 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj
+++ b/Tests/PowerSync/PowerSync.Common.Tests/PowerSync.Common.Tests.csproj
@@ -1,7 +1,7 @@
 <Project Sdk="Microsoft.NET.Sdk">
 
   <PropertyGroup>
-    <TargetFrameworks>netstandard2.0;net6.0;net8.0</TargetFrameworks>
+    <TargetFrameworks>netstandard2.0;net6.0;net8.0;net9.0</TargetFrameworks>
     <LangVersion>12</LangVersion>
     <ImplicitUsings>enable</ImplicitUsings>
     <Nullable>enable</Nullable>

From 32837539d01c4c4720e0bdfe8a7d93b9986bdbbe Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Fri, 7 Mar 2025 16:28:59 +0200
Subject: [PATCH 09/26] Emitting connected=false immediately on network break,
 SDK picks up instantly when the connected status should be false now.

---
 .../Client/Sync/Stream/StreamingSyncImplementation.cs      | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
index ca807c4..d1dec26 100644
--- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
@@ -297,7 +297,12 @@ protected async Task StreamingSync(CancellationToken? signal, PowerSyncConnectio
                 // This loop will retry.
                 // The nested abort controller will cleanup any open network requests and streams.
                 // The WebRemote should only abort pending fetch requests or close active Readable streams.
-                //
+
+                UpdateSyncStatus(new SyncStatusOptions
+                {
+                    Connected = false,
+                });
+
                 // On error, wait a little before retrying
                 await DelayRetry();
             }

From c62c6ab1b8d2f6c67c566ef557b4e042f900afe8 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Mon, 10 Mar 2025 08:54:34 +0200
Subject: [PATCH 10/26] Release status message.

---
 README.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/README.md b/README.md
index bd2fa45..4043bd9 100644
--- a/README.md
+++ b/README.md
@@ -8,6 +8,12 @@ _[PowerSync](https://www.powersync.com) is a sync engine for building local-firs
 
 `powersync-dotnet` is the monorepo for PowerSync .NET SDKs.
 
+## ⚠️ Project Status & Release Note
+
+This package is part of a monorepo that is not yet officially released or published. It is currently in a pre-alpha state, intended strictly for closed testing. Expect breaking changes and instability as development continues.
+
+Do not rely on this package for production use.
+
 ## Monorepo Structure: Packages
 
 - [PowerSync/Common](./PowerSync/Common/README.md)

From 0cae6fae822af8ef697e662ac5cbfa16a73bb8fa Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Tue, 11 Mar 2025 11:44:40 +0200
Subject: [PATCH 11/26] Added readlock/writelock/transactions to
 PowerSyncDatabase. Started transaction tests.

---
 .../Client/PowerSyncDatabase.cs               |  39 ++-
 .../PowerSyncDatabaseTransactionTests.cs      | 259 ++++++++++++++++++
 2 files changed, 297 insertions(+), 1 deletion(-)
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs

diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
index ceb6d0c..62cc759 100644
--- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
+++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
@@ -436,7 +436,7 @@ await Database.WriteTransaction(async tx =>
     /// </summary>
     public async Task<CrudTransaction?> GetNextCrudTransaction()
     {
-        return await Database.ReadTransaction(async tx =>
+        return await ReadTransaction(async tx =>
         {
             var first = await tx.GetOptional<CrudEntryJSON>(
             $"SELECT id, tx_id, data FROM {PSInternalTable.CRUD} ORDER BY id ASC LIMIT 1");
@@ -529,6 +529,43 @@ public async Task<T> Get<T>(string query, object[]? parameters = null)
         return await Database.Get<T>(query, parameters);
     }
 
+    public async Task<T> ReadLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null)
+    {
+        await WaitForReady();
+        return await Database.ReadLock(fn, options);
+    }
+
+    public async Task WriteLock(Func<ILockContext, Task> fn, DBLockOptions? options = null)
+    {
+        await WaitForReady();
+        await Database.WriteLock(fn, options);
+    }
+
+    public async Task<T> WriteLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null)
+    {
+        await WaitForReady();
+        return await Database.WriteLock(fn, options);
+    }
+
+    public async Task<T> ReadTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null)
+    {
+        await WaitForReady();
+        return await Database.ReadTransaction(fn, options);
+    }
+
+    public async Task WriteTransaction(Func<ITransaction, Task> fn, DBLockOptions? options = null)
+    {
+        await WaitForReady();
+        await Database.WriteTransaction(fn, options);
+    }
+
+    public async Task<T> WriteTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null)
+    {
+        await WaitForReady();
+        return await Database.WriteTransaction(fn, options);
+    }
+
+
 
     /// <summary>
     /// Executes a read query every time the source tables are modified.
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs
new file mode 100644
index 0000000..1e97cfa
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs
@@ -0,0 +1,259 @@
+namespace PowerSync.Common.Tests.Database;
+
+using PowerSync.Common.Client;
+
+public class PowerSyncDatabaseTransactionTests : IAsyncLifetime
+{
+    private PowerSyncDatabase db = default!;
+
+
+    public async Task InitializeAsync()
+    {
+        db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = "powersyncDataBaseTransactions.db" },
+            Schema = TestData.appSchema,
+        });
+        await db.Init();
+    }
+
+    public async Task DisposeAsync()
+    {
+        await db.DisconnectAndClear();
+        await db.Close();
+    }
+
+    private record IdResult(string id);
+    private record AssetResult(string id, string description, string? make = null);
+
+
+    // [Fact]
+    // public async Task SimpleReadTransactionTest()
+    // {
+    //     await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]);
+
+    //     var result = await db.Database.ReadTransaction(async tx =>
+    //     {
+    //         return await tx.GetAll<IdResult>("SELECT * FROM assets");
+    //     });
+
+    //     Assert.Single(result);
+    // }
+
+    // [Fact]
+    // public async Task ManualCommitTest()
+    // {
+    //     await db.WriteTransaction(async tx =>
+    //     {
+    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]);
+    //         await tx.Commit();
+    //     });
+
+    //     var result = await db.GetAll<IdResult>("SELECT * FROM assets WHERE id = ?", ["O4"]);
+
+    //     Assert.Single(result);
+    //     Assert.Equal("O4", result.First().id);
+    // }
+
+    // [Fact]
+    // public async Task AutoCommitTest()
+    // {
+    //     await db.WriteTransaction(async tx =>
+    //     {
+    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O41"]);
+    //     });
+
+    //     var result = await db.GetAll<IdResult>("SELECT * FROM assets WHERE id = ?", ["O41"]);
+
+    //     Assert.Single(result);
+    //     Assert.Equal("O41", result.First().id);
+    // }
+
+
+    // it('Transaction, manual rollback', async () => {
+    //   const {name, age, networth} = generateUserInfo();
+
+    //   await db.writeTransaction(async tx => {
+    //     await tx.execute(
+    //       'INSERT INTO "users" (id, name, age, networth) VALUES(uuid(), ?, ?, ?)',
+    //       [name, age, networth],
+    //     );
+    //     await tx.rollback();
+    //   });
+
+    //   const res = await db.execute('SELECT * FROM users');
+    //   expect(res.rows?._array).to.eql([]);
+    // });
+
+    // [Fact]
+    // public async Task ManualRollbackTest()
+    // {
+    //     await db.WriteTransaction(async tx =>
+    //     {
+    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O5"]);
+    //         await tx.Rollback();
+    //     });
+
+    //     var result = await db.GetAll<object>("SELECT * FROM assets");
+    //     Assert.Empty(result);
+    // }
+
+    // [Fact]
+    // public async Task AutoRollbackTest()
+    // {
+    //     bool exceptionThrown = false;
+    //     try
+    //     {
+    //         await db.WriteTransaction(async tx =>
+    //         {
+    //             // This should throw an exception
+    //             await tx.Execute("INSERT INTO assets(id) VALUES_SYNTAX_ERROR(?)", ["O5"]);
+    //         });
+    //     }
+    //     catch (Exception ex)
+    //     {
+    //         Assert.Contains("near \"VALUES_SYNTAX_ERROR\": syntax error", ex.Message);
+    //         exceptionThrown = true;
+    //     }
+
+    //     var result = await db.GetAll<IdResult>("SELECT * FROM assets");
+    //     Assert.Empty(result);
+    //     Assert.True(exceptionThrown);
+    // }
+
+    // [Fact]
+    // public async Task WriteTransactionWithReturnTest()
+    // {
+    //     var result = await db.WriteTransaction(async tx =>
+    //     {
+    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O5"]);
+    //         return await tx.GetAll<IdResult>("SELECT * FROM assets");
+    //     });
+
+    //     Assert.Single(result);
+    //     Assert.Equal("O5", result.First().id);
+    // }
+
+
+    // [Fact]
+    // public async Task WriteTransactionNestedQueryTest()
+    // {
+    //     await db.WriteTransaction(async tx =>
+    //     {
+    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O6"]);
+
+    //         var txQuery = await tx.GetAll<IdResult>("SELECT * FROM assets");
+    //         Assert.Single(txQuery);
+
+    //         var dbQuery = await db.GetAll<IdResult>("SELECT * FROM assets");
+    //         Assert.Empty(dbQuery);
+    //     });
+    // }
+
+    // [Fact]
+    // public async Task ReadLockShouldBeReadOnlyTest()
+    // {
+    //     string id = Guid.NewGuid().ToString();
+    //     bool exceptionThrown = false;
+
+    //     try
+    //     {
+    //         await db.ReadLock<object>(async context =>
+    //         {
+    //             return await context.Execute(
+    //                 "INSERT INTO assets (id) VALUES (?)",
+    //                 [id]
+    //             );
+    //         });
+
+    //         // If no exception is thrown, fail the test
+    //         throw new Exception("Did not throw");
+    //     }
+    //     catch (Exception ex)
+    //     {
+    //         Assert.Contains("attempt to write a readonly database", ex.Message);
+    //         exceptionThrown = true;
+    //     }
+
+    //     Assert.True(exceptionThrown);
+    // }
+
+    // [Fact]
+    // public async Task ReadLocksShouldQueueIfExceedNumberOfConnections()
+    // {
+    //     string id = Guid.NewGuid().ToString();
+
+    //     await db.Execute(
+    //         "INSERT INTO assets (id) VALUES (?)",
+    //         [id]
+    //     );
+
+    //     int numberOfReads = 20;
+    //     var tasks = Enumerable.Range(0, numberOfReads)
+    //         .Select(_ => db.ReadLock(async context =>
+    //         {
+    //             return await context.GetAll<AssetResult>("SELECT id FROM assets WHERE id = ?", [id]);
+    //         }))
+    //         .ToArray();
+
+    //     var lockResults = await Task.WhenAll(tasks);
+
+    //     var ids = lockResults.Select(r => r.FirstOrDefault()?.id).ToList();
+
+    //     Assert.All(ids, n => Assert.Equal(id, n));
+    // }
+
+    [Fact(Timeout = 2000)]
+    public async Task ShouldBeAbleToReadWhileAWriteIsRunning()
+    {
+        var tcs = new TaskCompletionSource();
+
+        // This won't resolve until another connection frees it
+        var writeTask = db.WriteLock(async context =>
+        {
+            await tcs.Task; // Wait until read lock signals to proceed
+        });
+
+        var readTask = db.ReadLock(async context =>
+        {
+            // Read logic could execute here while writeLock is still open
+            tcs.SetResult();
+            return 42;
+        });
+
+        var result = await readTask;
+        await writeTask; // Ensure write task completes
+
+        Assert.Equal(42, result);
+    }
+
+    // [Fact(Timeout = 5000)]
+    // public async Task TestConcurrentReads()
+    // {
+    //     await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O6-conccurent-1"]);
+    //     var tcs = new TaskCompletionSource<bool>();
+
+    //     // Start a long-running write transaction
+    //     var transactionTask = Task.Run(async () =>
+    //     {
+    //         await db.WriteTransaction(async tx =>
+    //         {
+    //             await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O6-conccurent-2"]);
+    //             await tcs.Task;
+    //             await tx.Commit();
+    //         });
+    //     });
+
+    //     // Try and read while the write transaction is still open
+    //     var result = await db.GetAll<object>("SELECT * FROM assets");
+    //     Assert.Single(result); // The transaction is not committed yet, so we should only read 1 row
+
+    //     // Let the transaction complete
+    //     tcs.SetResult(true);
+    //     await transactionTask;
+
+    //     // Read again after the transaction is committed
+    //     var afterTx = await db.GetAll<object>("SELECT * FROM assets");
+    //     Assert.Equal(2, afterTx.Length);
+    // }
+}
\ No newline at end of file

From af01b33ed468ffa53ee4310edf4027f4543b8d34 Mon Sep 17 00:00:00 2001
From: benitav <benita@journeyapps.com>
Date: Tue, 11 Mar 2025 13:07:30 +0200
Subject: [PATCH 12/26] Mention other compatible self-host-demos

---
 demos/CommandLine/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/demos/CommandLine/README.md b/demos/CommandLine/README.md
index 8e90319..eba4087 100644
--- a/demos/CommandLine/README.md
+++ b/demos/CommandLine/README.md
@@ -1,7 +1,7 @@
 # PowerSync CLI demo app
 
 This demo features a CLI-based table view that stays *live* using a *watch query*, ensuring the data updates in real time as changes occur.
-To run this demo, you need to have the [Node.js self-host demo](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs) running, as it provides the PowerSync server that this CLI's PowerSync SDK connects to.
+To run this demo, you need to have one of our Node.js self-host demos ([Postgres]((https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs)) | [MongoDB](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs-mongodb) | [MySQL](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs-mysql)) running, as it provides the PowerSync server that this CLI's PowerSync SDK connects to.
 
 Changes made to the backend's source DB or to the self-hosted web UI will be synced to this CLI client (and vice versa).
 

From 29cf7098162e2ab65a688f8cd278d04866b36b0c Mon Sep 17 00:00:00 2001
From: benitav <benitav@users.noreply.github.com>
Date: Tue, 11 Mar 2025 13:08:54 +0200
Subject: [PATCH 13/26] Update README.md

---
 demos/CommandLine/README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/demos/CommandLine/README.md b/demos/CommandLine/README.md
index eba4087..0f9fb70 100644
--- a/demos/CommandLine/README.md
+++ b/demos/CommandLine/README.md
@@ -1,7 +1,7 @@
 # PowerSync CLI demo app
 
 This demo features a CLI-based table view that stays *live* using a *watch query*, ensuring the data updates in real time as changes occur.
-To run this demo, you need to have one of our Node.js self-host demos ([Postgres]((https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs)) | [MongoDB](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs-mongodb) | [MySQL](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs-mysql)) running, as it provides the PowerSync server that this CLI's PowerSync SDK connects to.
+To run this demo, you need to have one of our Node.js self-host demos ([Postgres](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs) | [MongoDB](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs-mongodb) | [MySQL](https://github.com/powersync-ja/self-host-demo/tree/main/demos/nodejs-mysql)) running, as it provides the PowerSync server that this CLI's PowerSync SDK connects to.
 
 Changes made to the backend's source DB or to the self-hosted web UI will be synced to this CLI client (and vice versa).
 
@@ -29,4 +29,4 @@ To run the Command-Line interface:
 
 ```bash
 dotnet run Demo
-```
\ No newline at end of file
+```

From a202e59dddabad9c76876b9283db385053c6c204 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Tue, 11 Mar 2025 13:50:16 +0200
Subject: [PATCH 14/26] Completed transaction tests.

---
 .../PowerSyncDatabaseTransactionTests.cs      | 381 ++++++++++++++++++
 .../PowerSyncDatabaseTransactionTests.cs      | 259 ------------
 2 files changed, 381 insertions(+), 259 deletions(-)
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
 delete mode 100644 Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs

diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
new file mode 100644
index 0000000..9509176
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
@@ -0,0 +1,381 @@
+namespace PowerSync.Common.Tests.Client;
+
+using System.Diagnostics;
+using PowerSync.Common.Client;
+
+public class PowerSyncDatabaseTransactionTests : IAsyncLifetime
+{
+    private PowerSyncDatabase db = default!;
+
+
+    public async Task InitializeAsync()
+    {
+        db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = "powersyncDataBaseTransactions.db" },
+            Schema = TestData.appSchema,
+        });
+        await db.Init();
+    }
+
+    public async Task DisposeAsync()
+    {
+        await db.DisconnectAndClear();
+        await db.Close();
+    }
+
+    private record IdResult(string id);
+    private record AssetResult(string id, string description, string? make = null);
+    private record CountResult(int count);
+
+    [Fact]
+    public async Task SimpleReadTransactionTest()
+    {
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]);
+
+        var result = await db.Database.ReadTransaction(async tx =>
+        {
+            return await tx.GetAll<IdResult>("SELECT * FROM assets");
+        });
+
+        Assert.Single(result);
+    }
+
+    [Fact]
+    public async Task ManualCommitTest()
+    {
+        await db.WriteTransaction(async tx =>
+        {
+            await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]);
+            await tx.Commit();
+        });
+
+        var result = await db.GetAll<IdResult>("SELECT * FROM assets WHERE id = ?", ["O4"]);
+
+        Assert.Single(result);
+        Assert.Equal("O4", result.First().id);
+    }
+
+    [Fact]
+    public async Task AutoCommitTest()
+    {
+        await db.WriteTransaction(async tx =>
+        {
+            await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O41"]);
+        });
+
+        var result = await db.GetAll<IdResult>("SELECT * FROM assets WHERE id = ?", ["O41"]);
+
+        Assert.Single(result);
+        Assert.Equal("O41", result.First().id);
+    }
+
+    [Fact]
+    public async Task ManualRollbackTest()
+    {
+        await db.WriteTransaction(async tx =>
+        {
+            await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O5"]);
+            await tx.Rollback();
+        });
+
+        var result = await db.GetAll<object>("SELECT * FROM assets");
+        Assert.Empty(result);
+    }
+
+    [Fact]
+    public async Task AutoRollbackTest()
+    {
+        bool exceptionThrown = false;
+        try
+        {
+            await db.WriteTransaction(async tx =>
+            {
+                // This should throw an exception
+                await tx.Execute("INSERT INTO assets(id) VALUES_SYNTAX_ERROR(?)", ["O5"]);
+            });
+        }
+        catch (Exception ex)
+        {
+            Assert.Contains("near \"VALUES_SYNTAX_ERROR\": syntax error", ex.Message);
+            exceptionThrown = true;
+        }
+
+        var result = await db.GetAll<IdResult>("SELECT * FROM assets");
+        Assert.Empty(result);
+        Assert.True(exceptionThrown);
+    }
+
+    [Fact]
+    public async Task WriteTransactionWithReturnTest()
+    {
+        var result = await db.WriteTransaction(async tx =>
+        {
+            await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O5"]);
+            return await tx.GetAll<IdResult>("SELECT * FROM assets");
+        });
+
+        Assert.Single(result);
+        Assert.Equal("O5", result.First().id);
+    }
+
+
+    [Fact]
+    public async Task WriteTransactionNestedQueryTest()
+    {
+        await db.WriteTransaction(async tx =>
+        {
+            await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O6"]);
+
+            var txQuery = await tx.GetAll<IdResult>("SELECT * FROM assets");
+            Assert.Single(txQuery);
+
+            var dbQuery = await db.GetAll<IdResult>("SELECT * FROM assets");
+            Assert.Empty(dbQuery);
+        });
+    }
+
+    [Fact]
+    public async Task ReadLockShouldBeReadOnlyTest()
+    {
+        string id = Guid.NewGuid().ToString();
+        bool exceptionThrown = false;
+
+        try
+        {
+            await db.ReadLock<object>(async context =>
+            {
+                return await context.Execute(
+                    "INSERT INTO assets (id) VALUES (?)",
+                    [id]
+                );
+            });
+
+            // If no exception is thrown, fail the test
+            throw new Exception("Did not throw");
+        }
+        catch (Exception ex)
+        {
+            Assert.Contains("attempt to write a readonly database", ex.Message);
+            exceptionThrown = true;
+        }
+
+        Assert.True(exceptionThrown);
+    }
+
+    [Fact]
+    public async Task ReadLocksShouldQueueIfExceedNumberOfConnectionsTest()
+    {
+        string id = Guid.NewGuid().ToString();
+
+        await db.Execute(
+            "INSERT INTO assets (id) VALUES (?)",
+            [id]
+        );
+
+        int numberOfReads = 20;
+        var tasks = Enumerable.Range(0, numberOfReads)
+            .Select(_ => db.ReadLock(async context =>
+            {
+                return await context.GetAll<AssetResult>("SELECT id FROM assets WHERE id = ?", [id]);
+            }))
+            .ToArray();
+
+        var lockResults = await Task.WhenAll(tasks);
+
+        var ids = lockResults.Select(r => r.FirstOrDefault()?.id).ToList();
+
+        Assert.All(ids, n => Assert.Equal(id, n));
+    }
+
+    [Fact(Timeout = 2000)]
+    public async Task ShouldBeAbleToReadWhileAWriteIsRunningTest()
+    {
+        var tcs = new TaskCompletionSource();
+
+        // This won't resolve or free until another connection frees it
+        var writeTask = db.WriteLock(async context =>
+        {
+            await tcs.Task; // Wait until read lock signals to proceed
+        });
+
+        var readTask = db.ReadLock(async context =>
+        {
+            // Read logic could execute here while writeLock is still open
+            tcs.SetResult();
+            await Task.CompletedTask;
+            return 42;
+        });
+
+        var result = await readTask;
+        await writeTask;
+
+        Assert.Equal(42, result);
+    }
+
+    [Fact(Timeout = 2000)]
+    public async Task ShouldQueueSimultaneousExecutionsTest()
+    {
+        var order = new List<int>();
+        var operationCount = 5;
+
+        await db.WriteLock(async context =>
+        {
+            var tasks = Enumerable.Range(0, operationCount)
+                .Select(async index =>
+                {
+                    await context.Execute("SELECT * FROM assets");
+                    order.Add(index);
+                })
+                .ToArray();
+
+            await Task.WhenAll(tasks);
+        });
+
+        var expectedOrder = Enumerable.Range(0, operationCount).ToList();
+        Assert.Equal(expectedOrder, order);
+    }
+
+    [Fact(Timeout = 2000)]
+    public async Task ShouldCallUpdateHookOnChangesTest()
+    {
+        var cts = new CancellationTokenSource();
+        var result = new TaskCompletionSource();
+
+        db.OnChange(new WatchOnChangeHandler
+        {
+            OnChange = (x) =>
+            {
+                result.SetResult();
+                cts.Cancel();
+                return Task.CompletedTask;
+            }
+        }, new SQLWatchOptions
+        {
+            Tables = ["assets"],
+            Signal = cts.Token
+        });
+        await db.Execute("INSERT INTO assets (id) VALUES(?)", ["099-onchange"]);
+
+        await result.Task;
+    }
+
+    [Fact(Timeout = 2000)]
+    public async Task ShouldReflectWriteTransactionUpdatesOnReadConnectionsTest()
+    {
+        var watched = new TaskCompletionSource();
+
+        var cts = new CancellationTokenSource();
+        db.Watch("SELECT COUNT(*) as count FROM assets", null, new WatchHandler<CountResult>
+        {
+            OnResult = (x) =>
+            {
+                if (x.First().count == 1)
+                {
+                    watched.SetResult();
+                    cts.Cancel();
+                }
+            }
+        }, new SQLWatchOptions
+        {
+
+            Signal = cts.Token
+        });
+
+        await db.WriteTransaction(async tx =>
+        {
+            await tx.Execute("INSERT INTO assets (id) VALUES(?)", ["099-watch"]);
+        });
+
+        await watched.Task;
+    }
+
+    [Fact(Timeout = 2000)]
+    public async Task ShouldReflectWriteLockUpdatesOnReadConnectionsTest()
+    {
+        var numberOfUsers = 10_000;
+
+        var watched = new TaskCompletionSource();
+
+        var cts = new CancellationTokenSource();
+        db.Watch("SELECT COUNT(*) as count FROM assets", null, new WatchHandler<CountResult>
+        {
+            OnResult = (x) =>
+            {
+                if (x.First().count == numberOfUsers)
+                {
+                    watched.SetResult();
+                    cts.Cancel();
+                }
+            }
+        }, new SQLWatchOptions
+        {
+            Signal = cts.Token
+        });
+
+        await db.WriteLock(async tx =>
+        {
+            await tx.Execute("BEGIN");
+            for (var i = 0; i < numberOfUsers; i++)
+            {
+                await tx.Execute("INSERT INTO assets (id) VALUES(?)", ["0" + i + "-writelock"]);
+            }
+            await tx.Execute("COMMIT");
+        });
+
+        await watched.Task;
+    }
+
+    [Fact(Timeout = 5000)]
+    public async Task Insert10000Records_ShouldCompleteWithinTimeLimitTest()
+    {
+        var random = new Random();
+        var stopwatch = Stopwatch.StartNew();
+
+        for (int i = 0; i < 10000; ++i)
+        {
+            int n = random.Next(0, 100000);
+            await db.Execute(
+                "INSERT INTO assets(id, description) VALUES(?, ?)",
+                [i + 1, n]
+            );
+        }
+
+        await db.Execute("PRAGMA wal_checkpoint(RESTART)");
+
+        stopwatch.Stop();
+        var duration = stopwatch.ElapsedMilliseconds;
+
+        Assert.True(duration < 2000, $"Test took too long: {duration}ms");
+    }
+
+    [Fact(Timeout = 5000)]
+    public async Task TestConcurrentReadsTest()
+    {
+        await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O6-conccurent-1"]);
+        var tcs = new TaskCompletionSource<bool>();
+
+        // Start a long-running write transaction
+        var transactionTask = Task.Run(async () =>
+        {
+            await db.WriteTransaction(async tx =>
+            {
+                await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O6-conccurent-2"]);
+                await tcs.Task;
+                await tx.Commit();
+            });
+        });
+
+        // Try and read while the write transaction is still open
+        var result = await db.GetAll<object>("SELECT * FROM assets");
+        Assert.Single(result); // The transaction is not commited yet, we should only read 1 user
+
+        // Let the transaction complete
+        tcs.SetResult(true);
+        await transactionTask;
+
+        // Read again after the transaction is committed
+        var afterTx = await db.GetAll<object>("SELECT * FROM assets");
+        Assert.Equal(2, afterTx.Length);
+    }
+}
\ No newline at end of file
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs
deleted file mode 100644
index 1e97cfa..0000000
--- a/Tests/PowerSync/PowerSync.Common.Tests/Database/PowerSyncDatabaseTransactionTests.cs
+++ /dev/null
@@ -1,259 +0,0 @@
-namespace PowerSync.Common.Tests.Database;
-
-using PowerSync.Common.Client;
-
-public class PowerSyncDatabaseTransactionTests : IAsyncLifetime
-{
-    private PowerSyncDatabase db = default!;
-
-
-    public async Task InitializeAsync()
-    {
-        db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
-        {
-            Database = new SQLOpenOptions { DbFilename = "powersyncDataBaseTransactions.db" },
-            Schema = TestData.appSchema,
-        });
-        await db.Init();
-    }
-
-    public async Task DisposeAsync()
-    {
-        await db.DisconnectAndClear();
-        await db.Close();
-    }
-
-    private record IdResult(string id);
-    private record AssetResult(string id, string description, string? make = null);
-
-
-    // [Fact]
-    // public async Task SimpleReadTransactionTest()
-    // {
-    //     await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O3"]);
-
-    //     var result = await db.Database.ReadTransaction(async tx =>
-    //     {
-    //         return await tx.GetAll<IdResult>("SELECT * FROM assets");
-    //     });
-
-    //     Assert.Single(result);
-    // }
-
-    // [Fact]
-    // public async Task ManualCommitTest()
-    // {
-    //     await db.WriteTransaction(async tx =>
-    //     {
-    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O4"]);
-    //         await tx.Commit();
-    //     });
-
-    //     var result = await db.GetAll<IdResult>("SELECT * FROM assets WHERE id = ?", ["O4"]);
-
-    //     Assert.Single(result);
-    //     Assert.Equal("O4", result.First().id);
-    // }
-
-    // [Fact]
-    // public async Task AutoCommitTest()
-    // {
-    //     await db.WriteTransaction(async tx =>
-    //     {
-    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O41"]);
-    //     });
-
-    //     var result = await db.GetAll<IdResult>("SELECT * FROM assets WHERE id = ?", ["O41"]);
-
-    //     Assert.Single(result);
-    //     Assert.Equal("O41", result.First().id);
-    // }
-
-
-    // it('Transaction, manual rollback', async () => {
-    //   const {name, age, networth} = generateUserInfo();
-
-    //   await db.writeTransaction(async tx => {
-    //     await tx.execute(
-    //       'INSERT INTO "users" (id, name, age, networth) VALUES(uuid(), ?, ?, ?)',
-    //       [name, age, networth],
-    //     );
-    //     await tx.rollback();
-    //   });
-
-    //   const res = await db.execute('SELECT * FROM users');
-    //   expect(res.rows?._array).to.eql([]);
-    // });
-
-    // [Fact]
-    // public async Task ManualRollbackTest()
-    // {
-    //     await db.WriteTransaction(async tx =>
-    //     {
-    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O5"]);
-    //         await tx.Rollback();
-    //     });
-
-    //     var result = await db.GetAll<object>("SELECT * FROM assets");
-    //     Assert.Empty(result);
-    // }
-
-    // [Fact]
-    // public async Task AutoRollbackTest()
-    // {
-    //     bool exceptionThrown = false;
-    //     try
-    //     {
-    //         await db.WriteTransaction(async tx =>
-    //         {
-    //             // This should throw an exception
-    //             await tx.Execute("INSERT INTO assets(id) VALUES_SYNTAX_ERROR(?)", ["O5"]);
-    //         });
-    //     }
-    //     catch (Exception ex)
-    //     {
-    //         Assert.Contains("near \"VALUES_SYNTAX_ERROR\": syntax error", ex.Message);
-    //         exceptionThrown = true;
-    //     }
-
-    //     var result = await db.GetAll<IdResult>("SELECT * FROM assets");
-    //     Assert.Empty(result);
-    //     Assert.True(exceptionThrown);
-    // }
-
-    // [Fact]
-    // public async Task WriteTransactionWithReturnTest()
-    // {
-    //     var result = await db.WriteTransaction(async tx =>
-    //     {
-    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O5"]);
-    //         return await tx.GetAll<IdResult>("SELECT * FROM assets");
-    //     });
-
-    //     Assert.Single(result);
-    //     Assert.Equal("O5", result.First().id);
-    // }
-
-
-    // [Fact]
-    // public async Task WriteTransactionNestedQueryTest()
-    // {
-    //     await db.WriteTransaction(async tx =>
-    //     {
-    //         await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O6"]);
-
-    //         var txQuery = await tx.GetAll<IdResult>("SELECT * FROM assets");
-    //         Assert.Single(txQuery);
-
-    //         var dbQuery = await db.GetAll<IdResult>("SELECT * FROM assets");
-    //         Assert.Empty(dbQuery);
-    //     });
-    // }
-
-    // [Fact]
-    // public async Task ReadLockShouldBeReadOnlyTest()
-    // {
-    //     string id = Guid.NewGuid().ToString();
-    //     bool exceptionThrown = false;
-
-    //     try
-    //     {
-    //         await db.ReadLock<object>(async context =>
-    //         {
-    //             return await context.Execute(
-    //                 "INSERT INTO assets (id) VALUES (?)",
-    //                 [id]
-    //             );
-    //         });
-
-    //         // If no exception is thrown, fail the test
-    //         throw new Exception("Did not throw");
-    //     }
-    //     catch (Exception ex)
-    //     {
-    //         Assert.Contains("attempt to write a readonly database", ex.Message);
-    //         exceptionThrown = true;
-    //     }
-
-    //     Assert.True(exceptionThrown);
-    // }
-
-    // [Fact]
-    // public async Task ReadLocksShouldQueueIfExceedNumberOfConnections()
-    // {
-    //     string id = Guid.NewGuid().ToString();
-
-    //     await db.Execute(
-    //         "INSERT INTO assets (id) VALUES (?)",
-    //         [id]
-    //     );
-
-    //     int numberOfReads = 20;
-    //     var tasks = Enumerable.Range(0, numberOfReads)
-    //         .Select(_ => db.ReadLock(async context =>
-    //         {
-    //             return await context.GetAll<AssetResult>("SELECT id FROM assets WHERE id = ?", [id]);
-    //         }))
-    //         .ToArray();
-
-    //     var lockResults = await Task.WhenAll(tasks);
-
-    //     var ids = lockResults.Select(r => r.FirstOrDefault()?.id).ToList();
-
-    //     Assert.All(ids, n => Assert.Equal(id, n));
-    // }
-
-    [Fact(Timeout = 2000)]
-    public async Task ShouldBeAbleToReadWhileAWriteIsRunning()
-    {
-        var tcs = new TaskCompletionSource();
-
-        // This wont resolve or free until another connection free's it
-        var writeTask = db.WriteLock(async context =>
-        {
-            await tcs.Task; // Wait until read lock signals to proceed
-        });
-
-        var readTask = db.ReadLock(async context =>
-        {
-            // Read logic could execute here while writeLock is still open
-            tcs.SetResult();
-            return 42;
-        });
-
-        var result = await readTask;
-        await writeTask; // Ensure write task completes
-
-        Assert.Equal(42, result);
-    }
-
-    // [Fact(Timeout = 5000)]
-    // public async Task TestConcurrentReads()
-    // {
-    //     await db.Execute("INSERT INTO assets(id) VALUES(?)", ["O6-conccurent-1"]);
-    //     var tcs = new TaskCompletionSource<bool>();
-
-    //     // Start a long-running write transaction
-    //     var transactionTask = Task.Run(async () =>
-    //     {
-    //         await db.WriteTransaction(async tx =>
-    //         {
-    //             await tx.Execute("INSERT INTO assets(id) VALUES(?)", ["O6-conccurent-2"]);
-    //             await tcs.Task;
-    //             await tx.Commit();
-    //         });
-    //     });
-
-    //     // Try and read while the write transaction is still open
-    //     var result = await db.GetAll<object>("SELECT * FROM assets");
-    //     Assert.Single(result); // The transaction is not commited yet, we should only read 1 user
-
-    //     // Let the transaction complete
-    //     tcs.SetResult(true);
-    //     await transactionTask;
-
-    //     // Read again after the transaction is committed
-    //     var afterTx = await db.GetAll<object>("SELECT * FROM assets");
-    //     Assert.Equal(2, afterTx.Length);
-    // }
-}
\ No newline at end of file

From 8187e7e629f4bc87ddd4e29bf2ad95d83c3a204b Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Tue, 11 Mar 2025 15:59:20 +0200
Subject: [PATCH 15/26] Updated tests; moved test files under Client/Sync and extracted shared TestSchema (WIP)

---
 .../PowerSync.Common/DB/Crud/CrudEntry.cs     | 13 +---
 .../DB/Schema/IndexedColumn.cs                |  2 +-
 PowerSync/PowerSync.Common/DB/Schema/Table.cs |  6 +-
 .../PowerSyncDatabaseTransactionTests.cs      | 27 ++++----
 .../{ => Client/Sync}/BucketStorageTests.cs   | 39 ++----------
 .../Client/Sync/CRUDTests.cs                  | 63 +++++++++++++++++++
 .../PowerSync.Common.Tests/TestSchema.cs      | 35 +++++++++++
 7 files changed, 122 insertions(+), 63 deletions(-)
 rename Tests/PowerSync/PowerSync.Common.Tests/{ => Client/Sync}/BucketStorageTests.cs (96%)
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs

diff --git a/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
index ce974d0..3f3de62 100644
--- a/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
+++ b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
@@ -87,18 +87,7 @@ public static CrudEntry FromRow(CrudEntryJSON dbRow)
         );
     }
 
-    public CrudEntryOutputJSON ToJSON()
-    {
-        return new CrudEntryOutputJSON
-        {
-            OpId = ClientId,
-            Op = Op,
-            Type = Table,
-            Id = Id,
-            TransactionId = TransactionId,
-            Data = OpData
-        };
-    }
+
 
     public override bool Equals(object? obj)
     {
diff --git a/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs b/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
index 1a42038..aaed214 100644
--- a/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
+++ b/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
@@ -17,7 +17,7 @@ public class IndexedColumn(IndexColumnOptions options)
 
     public object ToJSON(Table table)
     {
-        var colType = table.OriginalColumns.TryGetValue(Name, out var value) ? value : default;
+        var colType = table.Columns.TryGetValue(Name, out var value) ? value : default;
 
         return JsonConvert.SerializeObject(
          new
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Table.cs b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
index 65359b1..74257fe 100644
--- a/PowerSync/PowerSync.Common/DB/Schema/Table.cs
+++ b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
@@ -25,7 +25,8 @@ public class Table
 {
     protected TableOptions Options { get; set; }
 
-    public Dictionary<string, ColumnType> OriginalColumns;
+    public Dictionary<string, ColumnType> Columns;
+    public Dictionary<string, List<string>> Indexes;
 
     private readonly List<Column> ConvertedColumns;
     private readonly List<Index> ConvertedIndexes;
@@ -47,7 +48,8 @@ [.. kv.Value.Select(name =>
 
         Options = options ?? new TableOptions();
 
-        OriginalColumns = columns;
+        Columns = columns;
+        Indexes = Options?.Indexes ?? [];
     }
 
     public string ToJSON(string Name = "")
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
index 9509176..d920b46 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
@@ -7,13 +7,12 @@ public class PowerSyncDatabaseTransactionTests : IAsyncLifetime
 {
     private PowerSyncDatabase db = default!;
 
-
     public async Task InitializeAsync()
     {
         db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
         {
             Database = new SQLOpenOptions { DbFilename = "powersyncDataBaseTransactions.db" },
-            Schema = TestData.appSchema,
+            Schema = TestSchema.appSchema,
         });
         await db.Init();
     }
@@ -136,7 +135,7 @@ await db.WriteTransaction(async tx =>
     }
 
     [Fact]
-    public async Task ReadLockShouldBeReadOnlyTest()
+    public async Task ReadLockReadOnlyTest()
     {
         string id = Guid.NewGuid().ToString();
         bool exceptionThrown = false;
@@ -164,7 +163,7 @@ await db.ReadLock<object>(async context =>
     }
 
     [Fact]
-    public async Task ReadLocksShouldQueueIfExceedNumberOfConnectionsTest()
+    public async Task ReadLocksQueueIfExceedNumberOfConnectionsTest()
     {
         string id = Guid.NewGuid().ToString();
 
@@ -189,7 +188,7 @@ await db.Execute(
     }
 
     [Fact(Timeout = 2000)]
-    public async Task ShouldBeAbleToReadWhileAWriteIsRunningTest()
+    public async Task ReadWhileWriteIsRunningTest()
     {
         var tcs = new TaskCompletionSource();
 
@@ -214,7 +213,7 @@ public async Task ShouldBeAbleToReadWhileAWriteIsRunningTest()
     }
 
     [Fact(Timeout = 2000)]
-    public async Task ShouldQueueSimultaneousExecutionsTest()
+    public async Task QueueSimultaneousExecutionsTest()
     {
         var order = new List<int>();
         var operationCount = 5;
@@ -237,7 +236,7 @@ await db.WriteLock(async context =>
     }
 
     [Fact(Timeout = 2000)]
-    public async Task ShouldCallUpdateHookOnChangesTest()
+    public async Task CallUpdateHookOnChangesTest()
     {
         var cts = new CancellationTokenSource();
         var result = new TaskCompletionSource();
@@ -261,7 +260,7 @@ public async Task ShouldCallUpdateHookOnChangesTest()
     }
 
     [Fact(Timeout = 2000)]
-    public async Task ShouldReflectWriteTransactionUpdatesOnReadConnectionsTest()
+    public async Task ReflectWriteTransactionUpdatesOnReadConnectionsTest()
     {
         var watched = new TaskCompletionSource();
 
@@ -291,9 +290,9 @@ await db.WriteTransaction(async tx =>
     }
 
     [Fact(Timeout = 2000)]
-    public async Task ShouldReflectWriteLockUpdatesOnReadConnectionsTest()
+    public async Task ReflectWriteLockUpdatesOnReadConnectionsTest()
     {
-        var numberOfUsers = 10_000;
+        var numberOfAssets = 10_000;
 
         var watched = new TaskCompletionSource();
 
@@ -302,7 +301,7 @@ public async Task ShouldReflectWriteLockUpdatesOnReadConnectionsTest()
         {
             OnResult = (x) =>
             {
-                if (x.First().count == numberOfUsers)
+                if (x.First().count == numberOfAssets)
                 {
                     watched.SetResult();
                     cts.Cancel();
@@ -316,7 +315,7 @@ public async Task ShouldReflectWriteLockUpdatesOnReadConnectionsTest()
         await db.WriteLock(async tx =>
         {
             await tx.Execute("BEGIN");
-            for (var i = 0; i < numberOfUsers; i++)
+            for (var i = 0; i < numberOfAssets; i++)
             {
                 await tx.Execute("INSERT INTO assets (id) VALUES(?)", ["0" + i + "-writelock"]);
             }
@@ -327,7 +326,7 @@ await db.WriteLock(async tx =>
     }
 
     [Fact(Timeout = 5000)]
-    public async Task Insert10000Records_ShouldCompleteWithinTimeLimitTest()
+    public async Task Insert10000Records_CompleteWithinTimeLimitTest()
     {
         var random = new Random();
         var stopwatch = Stopwatch.StartNew();
@@ -368,7 +367,7 @@ await db.WriteTransaction(async tx =>
 
         // Try and read while the write transaction is still open
         var result = await db.GetAll<object>("SELECT * FROM assets");
-        Assert.Single(result); // The transaction is not commited yet, we should only read 1 user
+        Assert.Single(result); // The transaction is not commited yet, we should only read 1 asset
 
         // Let the transaction complete
         tcs.SetResult(true);
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/BucketStorageTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs
similarity index 96%
rename from Tests/PowerSync/PowerSync.Common.Tests/BucketStorageTests.cs
rename to Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs
index a33b240..cb254d7 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/BucketStorageTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs
@@ -1,4 +1,4 @@
-namespace PowerSync.Common.Tests;
+namespace PowerSync.Common.Tests.Client.Sync;
 
 using System.Threading.Tasks;
 
@@ -58,35 +58,6 @@ class TestData
         ObjectId = "O1",
         Checksum = 5
     });
-
-    public static Table assets = new Table(new Dictionary<string, ColumnType>
-        {
-            { "created_at", ColumnType.TEXT },
-            { "make", ColumnType.TEXT },
-            { "model", ColumnType.TEXT },
-            { "serial_number", ColumnType.TEXT },
-            { "quantity", ColumnType.INTEGER },
-            { "user_id", ColumnType.TEXT },
-            { "customer_id", ColumnType.TEXT },
-            { "description", ColumnType.TEXT },
-        }, new TableOptions
-        {
-            Indexes = new Dictionary<string, List<string>> { { "makemodel", new List<string> { "make", "model" } } }
-        });
-
-    public static Table customers = new Table(new Dictionary<string, ColumnType>
-        {
-            { "name", ColumnType.TEXT },
-            { "email", ColumnType.TEXT }
-        });
-
-
-    public static Schema appSchema = new Schema(new Dictionary<string, Table>
-        {
-            { "assets", assets },
-            { "customers", customers }
-        });
-
 }
 
 public class BucketStorageTests : IAsyncLifetime
@@ -100,7 +71,7 @@ public async Task InitializeAsync()
         db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
         {
             Database = new SQLOpenOptions { DbFilename = "powersync.db" },
-            Schema = TestData.appSchema,
+            Schema = TestSchema.appSchema,
         });
         await db.Init();
         bucketStorage = new SqliteBucketStorage(db.Database, createLogger());
@@ -526,7 +497,7 @@ await Assert.ThrowsAsync<SqliteException>(async () =>
         powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
         {
             Database = new SQLOpenOptions { DbFilename = dbName },
-            Schema = TestData.appSchema,
+            Schema = TestSchema.appSchema,
         });
         await powersync.Init();
 
@@ -545,7 +516,7 @@ public async Task ShouldRemoveTypes()
         var powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
         {
             Database = new SQLOpenOptions { DbFilename = dbName },
-            Schema = TestData.appSchema,
+            Schema = TestSchema.appSchema,
         });
 
         await powersync.Init();
@@ -587,7 +558,7 @@ await Assert.ThrowsAsync<SqliteException>(async () =>
         powersync = new PowerSyncDatabase(new PowerSyncDatabaseOptions
         {
             Database = new SQLOpenOptions { DbFilename = dbName },
-            Schema = TestData.appSchema,
+            Schema = TestSchema.appSchema,
         });
         await powersync.Init();
 
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
new file mode 100644
index 0000000..9afc30f
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
@@ -0,0 +1,63 @@
+namespace PowerSync.Common.Tests.Client;
+
+using System.Diagnostics;
+using Newtonsoft.Json;
+using PowerSync.Common.Client;
+using PowerSync.Common.Client.Sync.Bucket;
+using PowerSync.Common.DB.Crud;
+
+public class CRUDTests : IAsyncLifetime
+{
+    private PowerSyncDatabase db = default!;
+
+    public async Task InitializeAsync()
+    {
+        db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = "crudtest12xas.db" },
+            Schema = TestSchema.appSchema,
+        });
+        await db.Init();
+    }
+
+    public async Task DisposeAsync()
+    {
+        await db.DisconnectAndClear();
+        await db.Close();
+    }
+
+    [Fact]
+    public async Task Insert_ShouldRecordCrudEntry()
+    {
+        string testId = Guid.NewGuid().ToString();
+
+        var initialRows = await db.GetAll<object>("SELECT * FROM ps_crud");
+        Assert.Empty(initialRows);
+
+        await db.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", [testId, "test"]);
+        var crudEntry = await db.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "PUT",
+                type = "assets",
+                id = testId,
+                data = new { description = "test" }
+            }),
+            crudEntry.Data
+        );
+
+        var tx = await db.GetNextCrudTransaction();
+        Assert.Equal(1, tx!.TransactionId);
+
+        var expectedCrudEntry = new CrudEntry(1, UpdateType.PUT, "assets", testId, 1, new Dictionary<string, object>
+        {
+            { "description", "test" }
+        });
+
+        Assert.True(tx.Crud.First().Equals(expectedCrudEntry));
+    }
+
+
+}
\ No newline at end of file
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs b/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
new file mode 100644
index 0000000..cecaf92
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
@@ -0,0 +1,35 @@
+using PowerSync.Common.DB.Schema;
+
+namespace PowerSync.Common.Tests;
+
+
+public class TestSchema
+{
+    public static Table assets = new Table(new Dictionary<string, ColumnType>
+        {
+            { "created_at", ColumnType.TEXT },
+            { "make", ColumnType.TEXT },
+            { "model", ColumnType.TEXT },
+            { "serial_number", ColumnType.TEXT },
+            { "quantity", ColumnType.INTEGER },
+            { "user_id", ColumnType.TEXT },
+            { "customer_id", ColumnType.TEXT },
+            { "description", ColumnType.TEXT },
+        }, new TableOptions
+        {
+            Indexes = new Dictionary<string, List<string>> { { "makemodel", new List<string> { "make", "model" } } }
+        });
+
+    public static Table customers = new Table(new Dictionary<string, ColumnType>
+        {
+            { "name", ColumnType.TEXT },
+            { "email", ColumnType.TEXT }
+        });
+
+
+    public static Schema appSchema = new Schema(new Dictionary<string, Table>
+        {
+            { "assets", assets },
+            { "customers", customers }
+        });
+}
\ No newline at end of file

From 6baaa0edaca68657cbc32cf47175c1d52f039101 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Tue, 11 Mar 2025 17:56:29 +0200
Subject: [PATCH 16/26] Added temp package usage section.

---
 README.md | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/README.md b/README.md
index 4043bd9..fc1bd55 100644
--- a/README.md
+++ b/README.md
@@ -87,3 +87,29 @@ Run a specific test
 dotnet test -v n --framework net8.0 --filter "test-file-pattern"  
 ```
 
+## Using the PowerSync.Common package in your project (temporary)
+A NuGet package will be available soon; until then, clone this repo and follow these steps:
+
+Add the dependency to your project's .csproj:
+```xml
+  <ItemGroup>
+    <ProjectReference Include="..\..\powersync-dotnet\PowerSync\PowerSync.Common\PowerSync.Common.csproj" />
+  </ItemGroup>
+```
+
+Which assumes the following directory structure:
+```
+code/
+  powersync-dotnet (X)
+  ├── PowerSync/PowerSync.Common
+  │   ├── PowerSync.Common.csproj
+  │   ├── Class1.cs
+  │   └── Utils.cs
+  └── root.sln
+
+  your-project
+  ├── demo
+  │   ├── Program.csproj
+  │   └── Program.cs
+  ├── root.sln
+```
\ No newline at end of file

From 89685744bce6c9e5a5a564328f3b529ccaf5266e Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Tue, 11 Mar 2025 18:09:39 +0200
Subject: [PATCH 17/26] Disable test in development.

---
 Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
index 9afc30f..4902fee 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
@@ -26,7 +26,7 @@ public async Task DisposeAsync()
         await db.Close();
     }
 
-    [Fact]
+    [Fact(Skip = "Need to delete db file")]
     public async Task Insert_ShouldRecordCrudEntry()
     {
         string testId = Guid.NewGuid().ToString();

From b4e093bddba92aaaa89a85270fad68b90d015e05 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Wed, 12 Mar 2025 14:06:11 +0200
Subject: [PATCH 18/26] Added further CrudTests. Ensuring read connection is
 closed, clearing db connection from pool.

---
 .../PowerSync.Common/DB/Crud/CrudEntry.cs     |   4 +-
 .../MDSQLite/MDSQLiteAdapter.cs               |   5 +-
 .../MDSQLite/MDSQLiteConnection.cs            |   2 +
 .../Client/Sync/CRUDTests.cs                  | 295 +++++++++++++++++-
 .../PowerSyncCredentialsTests.cs              | 143 ---------
 .../PowerSync.Common.Tests/TestSchema.cs      |   3 +-
 .../Utils/DatabaseUtils.cs                    |  39 +++
 7 files changed, 333 insertions(+), 158 deletions(-)
 delete mode 100644 Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs
 create mode 100644 Tests/PowerSync/PowerSync.Common.Tests/Utils/DatabaseUtils.cs

diff --git a/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
index 3f3de62..5fe8f02 100644
--- a/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
+++ b/PowerSync/PowerSync.Common/DB/Crud/CrudEntry.cs
@@ -30,7 +30,7 @@ public class CrudEntryJSON
 public class CrudEntryDataJSON
 {
     [JsonProperty("data")]
-    public Dictionary<string, object> Data { get; set; } = new();
+    public Dictionary<string, object>? Data { get; set; }
 
     [JsonProperty("op")]
     public UpdateType Op { get; set; }
@@ -87,8 +87,6 @@ public static CrudEntry FromRow(CrudEntryJSON dbRow)
         );
     }
 
-
-
     public override bool Equals(object? obj)
     {
         if (obj is not CrudEntry other) return false;
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
index ff87267..e9653f4 100644
--- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
@@ -32,8 +32,8 @@ public class MDSQLiteAdapter : EventStream<DBAdapterEvent>, IDBAdapter
     protected RequiredMDSQLiteOptions resolvedMDSQLiteOptions;
     private CancellationTokenSource? tablesUpdatedCts;
 
-    private static readonly AsyncLock writeMutex = new();
-    private static readonly AsyncLock readMutex = new();
+    private readonly AsyncLock writeMutex = new();
+    private readonly AsyncLock readMutex = new();
 
     public MDSQLiteAdapter(MDSQLiteAdapterOptions options)
     {
@@ -143,6 +143,7 @@ private void LoadExtension(SqliteConnection db)
         tablesUpdatedCts?.Cancel();
         base.Close();
         writeConnection?.Close();
+        readConnection?.Close();
     }
 
     public async Task<NonQueryResult> Execute(string query, object[]? parameters = null)
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
index 7d41507..3b36fe0 100644
--- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
@@ -213,6 +213,8 @@ public async Task<T> Get<T>(string sql, object[]? parameters = null)
     {
         base.Close();
         Db.Close();
+        // https://stackoverflow.com/questions/8511901/system-data-sqlite-close-not-releasing-database-file
+        SqliteConnection.ClearPool(Db);
     }
 
     public async Task RefreshSchema()
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
index 4902fee..5845580 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
@@ -1,20 +1,25 @@
-namespace PowerSync.Common.Tests.Client;
+namespace PowerSync.Common.Tests.Client.Sync;
+
+using Microsoft.Data.Sqlite;
 
-using System.Diagnostics;
 using Newtonsoft.Json;
+
 using PowerSync.Common.Client;
-using PowerSync.Common.Client.Sync.Bucket;
 using PowerSync.Common.DB.Crud;
+using PowerSync.Common.DB.Schema;
+using PowerSync.Common.Tests.Utils;
 
 public class CRUDTests : IAsyncLifetime
 {
     private PowerSyncDatabase db = default!;
+    private readonly string testId = Guid.NewGuid().ToString();
+    private readonly string dbName = "crud-test.db";
 
     public async Task InitializeAsync()
     {
         db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
         {
-            Database = new SQLOpenOptions { DbFilename = "crudtest12xas.db" },
+            Database = new SQLOpenOptions { DbFilename = dbName },
             Schema = TestSchema.appSchema,
         });
         await db.Init();
@@ -24,13 +29,12 @@ public async Task DisposeAsync()
     {
         await db.DisconnectAndClear();
         await db.Close();
+        DatabaseUtils.CleanDb(dbName);
     }
 
-    [Fact(Skip = "Need to delete db file")]
-    public async Task Insert_ShouldRecordCrudEntry()
+    [Fact]
+    public async Task Insert_RecordCrudEntryTest()
     {
-        string testId = Guid.NewGuid().ToString();
-
         var initialRows = await db.GetAll<object>("SELECT * FROM ps_crud");
         Assert.Empty(initialRows);
 
@@ -59,5 +63,280 @@ public async Task Insert_ShouldRecordCrudEntry()
         Assert.True(tx.Crud.First().Equals(expectedCrudEntry));
     }
 
+    private record CountResult(int count);
+
+    [Fact]
+    public async Task InsertOrReplaceTest()
+    {
+        await db.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", [testId, "test"]);
+        await db.Execute("DELETE FROM ps_crud WHERE 1");
+
+        // Replace existing entry
+        await db.Execute("INSERT OR REPLACE INTO assets(id, description) VALUES(?, ?)", [testId, "test2"]);
+
+        var crudEntry = await db.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "PUT",
+                type = "assets",
+                id = testId,
+                data = new { description = "test2" }
+            }),
+            crudEntry.Data
+        );
+
+        var assetCount = await db.Get<CountResult>("SELECT count(*) as count FROM assets");
+        Assert.Equal(1, assetCount.count);
+
+        // Test uniqueness constraint
+        var ex = await Assert.ThrowsAsync<SqliteException>(() =>
+            db.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", [testId, "test3"])
+        );
+
+        Assert.Contains("UNIQUE constraint failed", ex.Message);
+    }
+
+    [Fact]
+    public async Task UpdateTest()
+    {
+        await db.Execute("INSERT INTO assets(id, description, make) VALUES(?, ?, ?)", [testId, "test", "test"]);
+        await db.Execute("DELETE FROM ps_crud WHERE 1");
+
+        await db.Execute("UPDATE assets SET description = ? WHERE id = ?", ["test2", testId]);
+
+        var crudEntry = await db.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "PATCH",
+                type = "assets",
+                id = testId,
+                data = new { description = "test2" }
+            }),
+            crudEntry.Data
+        );
+
+        var tx = await db.GetNextCrudTransaction();
+        Assert.Equal(2, tx!.TransactionId);
+
+        var expectedCrudEntry = new CrudEntry(2, UpdateType.PATCH, "assets", testId, 2, new Dictionary<string, object>
+        {
+            { "description", "test2" }
+        });
+
+        Assert.True(tx.Crud.First().Equals(expectedCrudEntry));
+    }
+
+    [Fact]
+    public async Task DeleteTest()
+    {
+        await db.Execute("INSERT INTO assets(id, description, make) VALUES(?, ?, ?)", [testId, "test", "test"]);
+        await db.Execute("DELETE FROM ps_crud WHERE 1");
+
+        await db.Execute("DELETE FROM assets WHERE id = ?", [testId]);
+
+        var crudEntry = await db.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "DELETE",
+                type = "assets",
+                id = testId
+            }),
+            crudEntry.Data
+        );
+
+        var tx = await db.GetNextCrudTransaction();
+        Assert.Equal(2, tx!.TransactionId);
+
+        var expectedCrudEntry = new CrudEntry(2, UpdateType.DELETE, "assets", testId, 2);
+        Assert.Equal(expectedCrudEntry, tx.Crud.First());
+    }
+
+    [Fact]
+    public async Task InsertOnlyTablesTest()
+    {
+        var logs = new Table(new Dictionary<string, ColumnType>
+        {
+            { "level", ColumnType.TEXT },
+            { "content", ColumnType.TEXT },
+        }, new TableOptions
+        {
+            InsertOnly = true
+        });
+
+        Schema insertOnlySchema = new Schema(new Dictionary<string, Table>
+        {
+            { "logs", logs },
+        });
+
+        var uniqueDbName = $"test-{Guid.NewGuid()}.db";
+
+        var insertOnlyDb = new PowerSyncDatabase(new PowerSyncDatabaseOptions
+        {
+            Database = new SQLOpenOptions { DbFilename = uniqueDbName },
+            Schema = insertOnlySchema,
+        });
+
+        await insertOnlyDb.Init();
+
+        var initialCrudRows = await insertOnlyDb.GetAll<object>("SELECT * FROM ps_crud");
+        Assert.Empty(initialCrudRows);
+
+        await insertOnlyDb.Execute("INSERT INTO logs(id, level, content) VALUES(?, ?, ?)", [testId, "INFO", "test log"]);
+
+        var crudEntry = await insertOnlyDb.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "PUT",
+                type = "logs",
+                id = testId,
+                data = new { content = "test log", level = "INFO" }
+            }),
+            crudEntry.Data
+        );
+
+        var logRows = await insertOnlyDb.GetAll<object>("SELECT * FROM logs");
+        Assert.Empty(logRows);
+
+        var tx = await insertOnlyDb.GetNextCrudTransaction();
+        Assert.Equal(1, tx!.TransactionId);
+
+        var expectedCrudEntry = new CrudEntry(1, UpdateType.PUT, "logs", testId, 1, new Dictionary<string, object>
+        {
+            { "content", "test log" },
+            { "level", "INFO" }
+        });
+
+        Assert.True(tx.Crud.First().Equals(expectedCrudEntry));
+    }
+
+
+    private record QuantityResult(long quantity);
+
+    [Fact]
+    public async Task BigNumbersIntegerTest()
+    {
+        long bigNumber = 1L << 62;
+        await db.Execute("INSERT INTO assets(id, quantity) VALUES(?, ?)", [testId, bigNumber]);
+
+        var result = await db.Get<QuantityResult>("SELECT quantity FROM assets WHERE id = ?", [testId]);
+        Assert.Equal(bigNumber, result.quantity);
 
+        var crudEntry = await db.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "PUT",
+                type = "assets",
+                id = testId,
+                data = new { quantity = bigNumber }
+            }),
+            crudEntry.Data
+        );
+
+        var tx = await db.GetNextCrudTransaction();
+        Assert.Equal(1, tx!.TransactionId);
+
+        var expectedCrudEntry = new CrudEntry(1, UpdateType.PUT, "assets", testId, 1, new Dictionary<string, object>
+        {
+            { "quantity", bigNumber }
+        });
+
+        Assert.True(tx.Crud.First().Equals(expectedCrudEntry));
+    }
+
+    [Fact]
+    public async Task BigNumbersTextTest()
+    {
+        long bigNumber = 1L << 62;
+        await db.Execute("INSERT INTO assets(id, quantity) VALUES(?, ?)", [testId, bigNumber.ToString()]);
+
+        var result = await db.Get<QuantityResult>("SELECT quantity FROM assets WHERE id = ?", [testId]);
+        Assert.Equal(bigNumber, result.quantity);
+
+        var crudEntry = await db.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "PUT",
+                type = "assets",
+                id = testId,
+                data = new { quantity = bigNumber.ToString() }
+            }),
+            crudEntry.Data
+        );
+
+        await db.Execute("DELETE FROM ps_crud WHERE 1");
+
+        await db.Execute("UPDATE assets SET description = ?, quantity = CAST(quantity AS INTEGER) + 1 WHERE id = ?", [
+            "updated",
+        testId
+        ]);
+
+        crudEntry = await db.Get<CrudEntryJSON>("SELECT data FROM ps_crud ORDER BY id");
+
+        Assert.Equal(
+            JsonConvert.SerializeObject(new
+            {
+                op = "PATCH",
+                type = "assets",
+                id = testId,
+                data = new { description = "updated", quantity = bigNumber + 1 }
+            }),
+            crudEntry.Data
+        );
+    }
+
+    [Fact]
+    public async Task TransactionGroupingTest()
+    {
+        var initialCrudRows = await db.GetAll<object>("SELECT * FROM ps_crud");
+        Assert.Empty(initialCrudRows);
+
+        await db.WriteTransaction(async (tx) =>
+        {
+            await tx.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", [testId, "test1"]);
+            await tx.Execute("INSERT INTO assets(id, description) VALUES(?, ?)", ["test2", "test2"]);
+        });
+
+        await db.WriteTransaction(async (tx) =>
+        {
+            await tx.Execute("UPDATE assets SET description = ? WHERE id = ?", ["updated", testId]);
+        });
+
+        var tx1 = await db.GetNextCrudTransaction();
+        Assert.Equal(1, tx1!.TransactionId);
+
+        var expectedCrudEntries = new[]
+        {
+            new CrudEntry(1, UpdateType.PUT, "assets", testId, 1, new Dictionary<string, object> { { "description", "test1" } }),
+            new CrudEntry(2, UpdateType.PUT, "assets", "test2", 1, new Dictionary<string, object> { { "description", "test2" } })
+        };
+
+        Assert.True(tx1.Crud.Select((entry, index) => entry.Equals(expectedCrudEntries[index])).All(result => result));
+        await tx1.Complete();
+
+        var tx2 = await db.GetNextCrudTransaction();
+        Assert.Equal(2, tx2!.TransactionId);
+
+        var expectedCrudEntry2 = new CrudEntry(3, UpdateType.PATCH, "assets", testId, 2, new Dictionary<string, object>
+        {
+            { "description", "updated" }
+        });
+
+        Assert.True(tx2.Crud.First().Equals(expectedCrudEntry2));
+        await tx2.Complete();
+
+        var nextTx = await db.GetNextCrudTransaction();
+        Assert.Null(nextTx);
+    }
 }
\ No newline at end of file
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs
deleted file mode 100644
index bc9a81a..0000000
--- a/Tests/PowerSync/PowerSync.Common.Tests/PowerSyncCredentialsTests.cs
+++ /dev/null
@@ -1,143 +0,0 @@
-namespace PowerSync.Common.Tests;
-
-using PowerSync.Common.Client;
-using PowerSync.Common.Client.Connection;
-using PowerSync.Common.DB.Schema;
-using Newtonsoft.Json;
-
-// Some of this can be moved over to general tests/loading version?
-public class PowerSyncCredentialsTests
-{
-    private Schema AppSchema;
-    public PowerSyncCredentialsTests()
-    {
-        var users = new Table(new Dictionary<string, ColumnType>
-        {
-            { "name", ColumnType.TEXT },
-            { "age", ColumnType.INTEGER }
-        });
-
-        var posts = new Table(new Dictionary<string, ColumnType>
-        {
-            { "title", ColumnType.TEXT },
-            { "content", ColumnType.TEXT }
-        });
-
-        AppSchema = new Schema(new Dictionary<string, Table>
-        {
-            { "users", users },
-            { "posts", posts }
-        });
-    }
-
-    [Fact(Skip = "Skipping this test temporarily")]
-    public void SimpleTest()
-    {
-        var endpoint = "http://localhost";
-        var token = "token";
-        var expiresAt = new DateTime();
-        PowerSyncCredentials credentials = new PowerSyncCredentials(endpoint, token, expiresAt);
-        Assert.Equal(endpoint, credentials.Endpoint);
-        Assert.Equal(token, credentials.Token);
-        Assert.Equal(expiresAt, credentials.ExpiresAt);
-    }
-
-    [Fact(Skip = "Skipping this test temporarily")]
-    public async void LoadVersion()
-    {
-        // var db = new MDSAdapter();
-        var db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
-        {
-            Database = new SQLOpenOptions { DbFilename = "x.db" },
-            Schema = AppSchema,
-        });
-        Console.WriteLine("Pre adapter" + db.SdkVersion);
-        await db.WaitForReady();
-        Console.WriteLine("Post adapter" + db.SdkVersion);
-
-        await db.Execute(@"CREATE TABLE Users (
-        Id INTEGER PRIMARY KEY AUTOINCREMENT,
-        Name TEXT NOT NULL
-        );");
-
-        await db.Execute(@"INSERT INTO Users (Name) VALUES ('Alice');");
-        await db.Execute(@"INSERT INTO Users (Name) VALUES ('Bob');");
-        await db.Execute(@"UPDATE USERS set Name = 'Wonderland' where Name = 'Alice';");
-
-        var x = await db.GetAll<object>("SELECT Name FROM Users limit 1;", []);
-
-        string json = JsonConvert.SerializeObject(x, Formatting.Indented);
-        Console.WriteLine("Result: " + json);
-        // var x = await db.Execute("SELECT powersync_rs_version() as version");
-        // Console.WriteLine(x.Rows.Array.First().First());
-
-        // var x = await db.Execute("SELECT powersync_rs_version() as version");
-        // using var connection = new SqliteConnection("Data Source=:memory:");
-        // var db = new MDSConnection(new MDSConnectionOptions(connection));
-        // connection.Open();
-
-        // string extensionPath = Path.Combine(Directory.GetCurrentDirectory(), "../../../libpowersync.dylib");
-
-        // connection.LoadExtension(extensionPath);
-
-        // var x = await db.Execute("SELECT powersync_rs_version() as version where 1 = 0;");
-        // var x = await db.Execute("SELECT * FROM Users WHERE 1 = 0;");
-
-
-        // Console.WriteLine(x.Rows.Array.First().First().Value);
-        // new AbstractPowerSyncDatabase();
-        // await Task.Delay(5000);
-    }
-
-    private record User(string Name, int Age);
-
-    [Fact]
-    public async void SchemaTest()
-    {
-        var db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
-        {
-            Database = new SQLOpenOptions { DbFilename = "xxxx.db" },
-            Schema = AppSchema,
-        });
-        await db.DisconnectAndClear();
-        // const schema = new Schema({
-        //   users: new Table({
-        //     name: column.text,
-        //     age: { type: ColumnType.INTEGER }
-        //   }),
-        //   posts: new Table({
-        //     title: column.text,
-        //     content: column.text
-        //   })
-        // });
-
-
-        // var x = await db.GetAll<object>("SELECT name, sql FROM sqlite_master WHERE type='table' ORDER BY name;");
-        // string json = JsonConvert.SerializeObject(x, Formatting.Indented);
-        // Console.WriteLine("Result: " + json);
-        await db.Execute(@"INSERT INTO users (id, name, age) VALUES ('1','Alice', 20);");
-
-        var b = await db.GetAll<object>("SELECT * from users");
-        string jsona = JsonConvert.SerializeObject(b, Formatting.Indented);
-
-        Console.WriteLine("Result xxx: " + jsona);
-        // Console.WriteLine("Result xxx: " + (User)b[0]);
-
-        // var c = await db.Execute("PRAGMA table_info(users);");
-        // string jsonb = JsonConvert.SerializeObject(c.Rows.Array, Formatting.Indented);
-
-        // var k = await db.Database.ReadTransaction(async (tx) =>
-        // {
-        //     Console.WriteLine("reee");
-
-        //     return await tx.Execute("select * from users limit 1");
-        // });
-        // string jsonb = JsonConvert.SerializeObject(k.Rows.Array, Formatting.Indented);
-
-        // Console.WriteLine(jsonb);
-        // 
-
-        // Console.WriteLine(AppSchema.ToJson());
-    }
-
-}
\ No newline at end of file
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs b/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
index cecaf92..8d7ad0d 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
@@ -1,7 +1,6 @@
-using PowerSync.Common.DB.Schema;
-
 namespace PowerSync.Common.Tests;
 
+using PowerSync.Common.DB.Schema;
 
 public class TestSchema
 {
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Utils/DatabaseUtils.cs b/Tests/PowerSync/PowerSync.Common.Tests/Utils/DatabaseUtils.cs
new file mode 100644
index 0000000..f820b84
--- /dev/null
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Utils/DatabaseUtils.cs
@@ -0,0 +1,39 @@
+namespace PowerSync.Common.Tests.Utils;
+
+public static class DatabaseUtils
+{
+    public static void CleanDb(string path)
+    {
+        TryDelete(path);
+        TryDelete($"{path}-shm");
+        TryDelete($"{path}-wal");
+    }
+
+    private static void TryDelete(string filePath)
+    {
+        var file = new FileInfo(filePath);
+        if (!file.Exists)
+            return;
+
+        const int retryCount = 3;
+        int attempt = 0;
+
+        while (attempt < retryCount)
+        {
+            try
+            {
+                file.Delete();
+                file.Refresh(); // force state update
+                if (!file.Exists)
+                    return;
+            }
+            catch (IOException)
+            {
+                attempt++;
+                Thread.Sleep(100);
+            }
+        }
+
+        Console.Error.WriteLine($"Failed to delete file after {retryCount} attempts: {filePath}");
+    }
+}
\ No newline at end of file

From f4f998aee2e5dd728ccd0a89b7cbed7abd1f6b63 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Wed, 12 Mar 2025 14:07:12 +0200
Subject: [PATCH 19/26] Made Watch return a Task, which completes after it has
 finished initializing.

---
 PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs      | 6 +++++-
 PowerSync/PowerSync.Common/README.md                        | 6 +++---
 .../Client/PowerSyncDatabaseTransactionTests.cs             | 5 ++---
 demos/CommandLine/Demo.cs                                   | 2 +-
 4 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
index 62cc759..85d8980 100644
--- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
+++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
@@ -451,6 +451,7 @@ await Database.WriteTransaction(async tx =>
 
             if (txId == null)
             {
+
                 all = [CrudEntry.FromRow(first)];
             }
             else
@@ -573,8 +574,9 @@ public async Task<T> WriteTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockO
     /// Use <see cref="SQLWatchOptions.ThrottleMs"/> to specify the minimum interval between queries.
     /// Source tables are automatically detected using <c>EXPLAIN QUERY PLAN</c>.
     /// </summary>
-    public void Watch<T>(string query, object[]? parameters, WatchHandler<T> handler, SQLWatchOptions? options = null)
+    public Task Watch<T>(string query, object[]? parameters, WatchHandler<T> handler, SQLWatchOptions? options = null)
     {
+        var tcs = new TaskCompletionSource();
         Task.Run(async () =>
         {
             try
@@ -604,12 +606,14 @@ public void Watch<T>(string query, object[]? parameters, WatchHandler<T> handler
                     Signal = options?.Signal,
                     ThrottleMs = options?.ThrottleMs
                 });
+                tcs.SetResult();
             }
             catch (Exception ex)
             {
                 handler.OnError?.Invoke(ex);
             }
         });
+        return tcs.Task;
     }
 
     private record ExplainedResult(string opcode, int p2, int p3);
diff --git a/PowerSync/PowerSync.Common/README.md b/PowerSync/PowerSync.Common/README.md
index 186b3ed..818a081 100644
--- a/PowerSync/PowerSync.Common/README.md
+++ b/PowerSync/PowerSync.Common/README.md
@@ -24,12 +24,12 @@ static async Task Main() {
 
 ```
 
-
 ### Watched queries
-Watched queries will automatically update when a dependant table is updated.
+Watched queries will automatically update when a dependent table is updated.
+Awaiting `Watch()` ensures the watcher is fully initialized and ready to monitor database changes.
 
 ```csharp
-db.Watch("select * from lists", null, new WatchHandler<ListResult>
+await db.Watch("select * from lists", null, new WatchHandler<ListResult>
 {
     OnResult = (results) =>
     {
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
index d920b46..8149cf2 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
@@ -265,7 +265,7 @@ public async Task ReflectWriteTransactionUpdatesOnReadConnectionsTest()
         var watched = new TaskCompletionSource();
 
         var cts = new CancellationTokenSource();
-        db.Watch("SELECT COUNT(*) as count FROM assets", null, new WatchHandler<CountResult>
+        await db.Watch("SELECT COUNT(*) as count FROM assets", null, new WatchHandler<CountResult>
         {
             OnResult = (x) =>
             {
@@ -277,7 +277,6 @@ public async Task ReflectWriteTransactionUpdatesOnReadConnectionsTest()
             }
         }, new SQLWatchOptions
         {
-
             Signal = cts.Token
         });
 
@@ -297,7 +296,7 @@ public async Task ReflectWriteLockUpdatesOnReadConnectionsTest()
         var watched = new TaskCompletionSource();
 
         var cts = new CancellationTokenSource();
-        db.Watch("SELECT COUNT(*) as count FROM assets", null, new WatchHandler<CountResult>
+        await db.Watch("SELECT COUNT(*) as count FROM assets", null, new WatchHandler<CountResult>
         {
             OnResult = (x) =>
             {
diff --git a/demos/CommandLine/Demo.cs b/demos/CommandLine/Demo.cs
index 35c00ae..dacb958 100644
--- a/demos/CommandLine/Demo.cs
+++ b/demos/CommandLine/Demo.cs
@@ -31,7 +31,7 @@ static async Task Main()
 
         bool running = true;
 
-        db.Watch("select * from lists", null, new WatchHandler<ListResult>
+        await db.Watch("select * from lists", null, new WatchHandler<ListResult>
         {
             OnResult = (results) =>
             {

From 9b1928059dd3f63d85adb112102b5e0d9fd5d861 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Wed, 12 Mar 2025 14:44:27 +0200
Subject: [PATCH 20/26] Removed Class1.cs file

---
 src/Common/Class1.cs | 9 ---------
 1 file changed, 9 deletions(-)
 delete mode 100644 src/Common/Class1.cs

diff --git a/src/Common/Class1.cs b/src/Common/Class1.cs
deleted file mode 100644
index 7f34076..0000000
--- a/src/Common/Class1.cs
+++ /dev/null
@@ -1,9 +0,0 @@
-namespace Common;
-
-public class Class1
-{
-    public void Write()
-    {
-        Console.WriteLine("Hello from Common!");
-    }
-}

From 61bffc8d1c1ed01d2a5a8596b37327ce4c4bf90d Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Wed, 12 Mar 2025 14:47:06 +0200
Subject: [PATCH 21/26] Removing demo tmp file.

---
 demos/CommandLine/.gitignore  | 1 +
 demos/CommandLine/user_id.txt | 1 -
 2 files changed, 1 insertion(+), 1 deletion(-)
 create mode 100644 demos/CommandLine/.gitignore
 delete mode 100644 demos/CommandLine/user_id.txt

diff --git a/demos/CommandLine/.gitignore b/demos/CommandLine/.gitignore
new file mode 100644
index 0000000..e335cef
--- /dev/null
+++ b/demos/CommandLine/.gitignore
@@ -0,0 +1 @@
+user_id.txt
\ No newline at end of file
diff --git a/demos/CommandLine/user_id.txt b/demos/CommandLine/user_id.txt
deleted file mode 100644
index 944c084..0000000
--- a/demos/CommandLine/user_id.txt
+++ /dev/null
@@ -1 +0,0 @@
-eb6b96fc-6f45-4b36-ac92-0f47fae35838
\ No newline at end of file

From 683cdc77b5cad57f735b431040530ec269fea83f Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Wed, 12 Mar 2025 15:20:49 +0200
Subject: [PATCH 22/26] Newline formatting.

---
 PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs        | 4 ----
 PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs       | 1 -
 .../Client/Sync/Stream/StreamingSyncImplementation.cs         | 3 ---
 .../PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs | 1 -
 PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs              | 2 --
 PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs         | 1 -
 PowerSync/PowerSync.Common/DB/Schema/Schema.cs                | 2 --
 PowerSync/PowerSync.Common/DB/Schema/Table.cs                 | 2 --
 PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs        | 2 --
 PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs     | 1 -
 PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs        | 2 --
 .../Client/PowerSyncDatabaseTransactionTests.cs               | 1 -
 .../PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs  | 1 -
 .../PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs | 1 -
 Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs    | 2 --
 Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs          | 1 -
 src/Common/DB/Crud/SyncStatus.cs                              | 2 --
 17 files changed, 29 deletions(-)

diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
index 85d8980..69b69f7 100644
--- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
+++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
@@ -40,7 +40,6 @@ public class OpenFactorySource(ISQLOpenFactory Factory) : DatabaseSource
     public ISQLOpenFactory Factory { get; init; } = Factory;
 }
 
-
 public class PowerSyncDatabaseOptions() : BasePowerSyncDatabaseOptions()
 {
     /// <summary> 
@@ -566,8 +565,6 @@ public async Task<T> WriteTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockO
         return await Database.WriteTransaction(fn, options);
     }
 
-
-
     /// <summary>
     /// Executes a read query every time the source tables are modified.
     /// <para />
@@ -633,7 +630,6 @@ public async Task<string[]> ResolveTables(string sql, object[]? parameters = nul
                 .Select(row => row.p2)
                 .ToList();
 
-
             var tables = await GetAll<TableSelectResult>(
                 "SELECT DISTINCT tbl_name FROM sqlite_master WHERE rootpage IN (SELECT json_each.value FROM json_each(?))",
                 [JsonConvert.SerializeObject(rootPages)]
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
index 395555c..d293521 100644
--- a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
@@ -87,7 +87,6 @@ public async Task<T> Get<T>(string path, Dictionary<string, string>? headers = n
         return JsonConvert.DeserializeObject<T>(responseData)!;
     }
 
-
     public async IAsyncEnumerable<StreamingSyncLine?> PostStream(SyncStreamOptions options)
     {
         using var requestMessage = await BuildRequest(HttpMethod.Post, options.Path, options.Data, options.Headers);
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
index d1dec26..7b331fa 100644
--- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
@@ -90,7 +90,6 @@ public class PowerSyncConnectionOptions(
     public int? CrudUploadThrottleMs { get; set; } = crudUploadThrottleMs;
 }
 
-
 public class StreamingSyncImplementation : EventStream<StreamingSyncImplementationEvent>
 {
     public static RequiredPowerSyncConnectionOptions DEFAULT_STREAM_CONNECTION_OPTIONS = new()
@@ -127,7 +126,6 @@ public StreamingSyncImplementation(StreamingSyncImplementationOptions options)
             }
         });
 
-
         locks = new StreamingSyncLocks();
         logger = options.Logger ?? NullLogger.Instance;
 
@@ -168,7 +166,6 @@ public async Task Connect(PowerSyncConnectionOptions? options = null)
         }
         CancellationTokenSource = new CancellationTokenSource();
 
-
         streamingSyncTask = StreamingSync(CancellationTokenSource.Token, options);
 
         var tcs = new TaskCompletionSource<bool>();
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs
index c46d4d1..6f78059 100644
--- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncTypes.cs
@@ -135,7 +135,6 @@ public class StreamingSyncKeepalive : StreamingSyncLine
     public int? TokenExpiresIn { get; set; }
 }
 
-
 public class CrudRequest
 {
     [JsonProperty("data")]
diff --git a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs
index a12fcf3..b04d1d9 100644
--- a/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs
+++ b/PowerSync/PowerSync.Common/DB/Crud/SyncStatus.cs
@@ -24,7 +24,6 @@ public SyncStatusOptions(SyncStatusOptions options)
         HasSynced = options.HasSynced;
     }
 
-
     [JsonProperty("connected")]
     public bool? Connected { get; set; }
 
@@ -66,7 +65,6 @@ public class SyncStatus(SyncStatusOptions options)
     /// </summary>
     public SyncDataFlowStatus DataFlowStatus => Options.DataFlow ?? new SyncDataFlowStatus();
 
-
     public bool IsEqual(SyncStatus status)
     {
         return JsonConvert.SerializeObject(Options) == JsonConvert.SerializeObject(status.Options);
diff --git a/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs b/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
index aaed214..0a8430b 100644
--- a/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
+++ b/PowerSync/PowerSync.Common/DB/Schema/IndexedColumn.cs
@@ -14,7 +14,6 @@ public class IndexedColumn(IndexColumnOptions options)
 
     protected bool Ascending { get; set; } = options.Ascending;
 
-
     public object ToJSON(Table table)
     {
         var colType = table.Columns.TryGetValue(Name, out var value) ? value : default;
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Schema.cs b/PowerSync/PowerSync.Common/DB/Schema/Schema.cs
index 2fc5e0e..0742088 100644
--- a/PowerSync/PowerSync.Common/DB/Schema/Schema.cs
+++ b/PowerSync/PowerSync.Common/DB/Schema/Schema.cs
@@ -20,8 +20,6 @@ public string ToJSON()
             }).ToList()
         };
 
-
         return JsonConvert.SerializeObject(jsonObject);
     }
 }
-
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Table.cs b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
index 74257fe..6901dcd 100644
--- a/PowerSync/PowerSync.Common/DB/Schema/Table.cs
+++ b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
@@ -5,7 +5,6 @@ namespace PowerSync.Common.DB.Schema;
 // TODO CL Need to port this to C#
 // export const InvalidSQLCharacters = /["'%,.#\s[\]]/;
 
-
 public class TableOptions(
     Dictionary<string, List<string>>? indexes = null,
     bool? localOnly = null,
@@ -66,4 +65,3 @@ public string ToJSON(string Name = "")
         return JsonConvert.SerializeObject(jsonObject);
     }
 }
-
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
index e9653f4..b29b788 100644
--- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteAdapter.cs
@@ -63,7 +63,6 @@ private async Task Init()
         writeConnection = await OpenConnection(options.Name);
         readConnection = await OpenConnection(options.Name);
 
-
         string[] baseStatements =
         [
             $"PRAGMA busy_timeout = {resolvedMDSQLiteOptions.LockTimeoutMs}",
@@ -79,7 +78,6 @@ private async Task Init()
             $"PRAGMA synchronous = {resolvedMDSQLiteOptions.Synchronous}",
         ];
 
-
         string[] readConnectionStatements =
         [
             .. baseStatements,
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
index 3b36fe0..f0cda83 100644
--- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
@@ -127,7 +127,6 @@ public async Task<NonQueryResult> Execute(string query, object[]? parameters = n
         };
     }
 
-
     public async Task<QueryResult> ExecuteQuery(string query, object[]? parameters = null)
     {
         var result = new QueryResult();
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs
index f44f708..a873160 100644
--- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteOptions.cs
@@ -53,7 +53,6 @@ public sealed class SqliteSynchronous
     public static implicit operator string(SqliteSynchronous mode) => mode.Value;
 }
 
-
 public class SqliteExtension
 {
     public string Path { get; set; } = string.Empty;
@@ -105,7 +104,6 @@ public class MDSQLiteOptions
     public SqliteExtension[]? Extensions { get; set; }
 }
 
-
 public class RequiredMDSQLiteOptions : MDSQLiteOptions
 {
     public static RequiredMDSQLiteOptions DEFAULT_SQLITE_OPTIONS = new()
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
index 8149cf2..40e84b1 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/PowerSyncDatabaseTransactionTests.cs
@@ -118,7 +118,6 @@ public async Task WriteTransactionWithReturnTest()
         Assert.Equal("O5", result.First().id);
     }
 
-
     [Fact]
     public async Task WriteTransactionNestedQueryTest()
     {
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs
index cb254d7..7025038 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/BucketStorageTests.cs
@@ -65,7 +65,6 @@ public class BucketStorageTests : IAsyncLifetime
     private PowerSyncDatabase db = default!;
     private IBucketStorageAdapter bucketStorage = default!;
 
-
     public async Task InitializeAsync()
     {
         db = new PowerSyncDatabase(new PowerSyncDatabaseOptions
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
index 5845580..6f31ee4 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/Client/Sync/CRUDTests.cs
@@ -217,7 +217,6 @@ public async Task InsertOnlyTablesTest()
         Assert.True(tx.Crud.First().Equals(expectedCrudEntry));
     }
 
-
     private record QuantityResult(long quantity);
 
     [Fact]
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs b/Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs
index 7a6dd99..08ce1c6 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/EventStreamTests.cs
@@ -17,7 +17,6 @@ public async Task EventStream_ShouldReceiveTwoMessages_Async()
         var completedTask = new TaskCompletionSource<bool>();
         var listenerReadySource = new TaskCompletionSource<bool>();
 
-
         var listenTask = Task.Run(async () =>
         {
             var stream = eventStream.ListenAsync(cts.Token);
@@ -52,7 +51,6 @@ public async Task EventStream_ShouldReceiveTwoMessages_Async()
         eventStream.Emit(status1);
         eventStream.Emit(status2);
 
-
         await completedTask.Task;
 
         Assert.Equal(2, receivedMessages.Count);
diff --git a/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs b/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
index 8d7ad0d..b7e37af 100644
--- a/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
+++ b/Tests/PowerSync/PowerSync.Common.Tests/TestSchema.cs
@@ -25,7 +25,6 @@ public class TestSchema
             { "email", ColumnType.TEXT }
         });
 
-
     public static Schema appSchema = new Schema(new Dictionary<string, Table>
         {
             { "assets", assets },
diff --git a/src/Common/DB/Crud/SyncStatus.cs b/src/Common/DB/Crud/SyncStatus.cs
index b83abfb..d6e17eb 100644
--- a/src/Common/DB/Crud/SyncStatus.cs
+++ b/src/Common/DB/Crud/SyncStatus.cs
@@ -2,14 +2,12 @@ namespace Common.DB.Crud;
 
 using System.Text.Json;
 
-
 public class SyncDataFlowStatus
 {
     public bool Downloading { get; set; } = false;
     public bool Uploading { get; set; } = false;    
 }
 
-
  public class SyncStatusOptions
 {
     public bool? Connected { get; set; }

From 7f795abc571a165c4a016828091c49e28953cba5 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Wed, 12 Mar 2025 15:46:33 +0200
Subject: [PATCH 23/26] Removed `src` contents that were missed in renaming of
 directories.

---
 src/Common.Tests/Common.Tests.csproj          | 27 ---------
 src/Common.Tests/PowerSyncCredentialsTests.cs | 16 ------
 .../Client/AbstractPowerSyncDatabase.cs       | 20 -------
 .../Connection/IPowerSyncBackendConnector.cs  | 23 --------
 .../Client/Connection/PowerSyncCredentials.cs |  7 ---
 src/Common/Common.csproj                      |  9 ---
 src/Common/DB/Crud/SyncStatus.cs              | 57 -------------------
 src/Common/DB/Crud/UploadQueueStatus.cs       | 17 ------
 8 files changed, 176 deletions(-)
 delete mode 100644 src/Common.Tests/Common.Tests.csproj
 delete mode 100644 src/Common.Tests/PowerSyncCredentialsTests.cs
 delete mode 100644 src/Common/Client/AbstractPowerSyncDatabase.cs
 delete mode 100644 src/Common/Client/Connection/IPowerSyncBackendConnector.cs
 delete mode 100644 src/Common/Client/Connection/PowerSyncCredentials.cs
 delete mode 100644 src/Common/Common.csproj
 delete mode 100644 src/Common/DB/Crud/SyncStatus.cs
 delete mode 100644 src/Common/DB/Crud/UploadQueueStatus.cs

diff --git a/src/Common.Tests/Common.Tests.csproj b/src/Common.Tests/Common.Tests.csproj
deleted file mode 100644
index 374178d..0000000
--- a/src/Common.Tests/Common.Tests.csproj
+++ /dev/null
@@ -1,27 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-
-  <PropertyGroup>
-    <TargetFramework>net8.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-
-    <IsPackable>false</IsPackable>
-    <IsTestProject>true</IsTestProject>
-  </PropertyGroup>
-
-  <ItemGroup>
-    <PackageReference Include="coverlet.collector" Version="6.0.0" />
-    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" />
-    <PackageReference Include="xunit" Version="2.5.3" />
-    <PackageReference Include="xunit.runner.visualstudio" Version="2.5.3" />
-  </ItemGroup>
-
-  <ItemGroup>
-    <Using Include="Xunit" />
-  </ItemGroup>
-
-  <ItemGroup>
-    <ProjectReference Include="..\Common\Common.csproj" />
-  </ItemGroup>
-
-</Project>
diff --git a/src/Common.Tests/PowerSyncCredentialsTests.cs b/src/Common.Tests/PowerSyncCredentialsTests.cs
deleted file mode 100644
index 462cefb..0000000
--- a/src/Common.Tests/PowerSyncCredentialsTests.cs
+++ /dev/null
@@ -1,16 +0,0 @@
-namespace Common.Tests;
-using Common.Client.Connection;
-public class PowerSyncCredentialsTests
-{
-    [Fact]
-    public void SimpleTest()
-    {
-        var endpoint = "http://localhost";
-        var token = "token";
-        var expiresAt = new DateTime();
-        PowerSyncCredentials credentials = new PowerSyncCredentials(endpoint, token, expiresAt);
-        Assert.Equal(endpoint, credentials.Endpoint);
-        Assert.Equal(token, credentials.Token);
-        Assert.Equal(expiresAt, credentials.ExpiresAt);
-    }
-}
\ No newline at end of file
diff --git a/src/Common/Client/AbstractPowerSyncDatabase.cs b/src/Common/Client/AbstractPowerSyncDatabase.cs
deleted file mode 100644
index 990e84c..0000000
--- a/src/Common/Client/AbstractPowerSyncDatabase.cs
+++ /dev/null
@@ -1,20 +0,0 @@
-using Common.DB.Crud;
-
-namespace Common.Client;
-public abstract class AbstractPowerSyncDatabase {
-
-    // Returns true if the connection is closed.    
-    bool closed;
-    bool ready;
-
-    string sdkVersion;
-    SyncStatus syncStatus;
-
-    public AbstractPowerSyncDatabase() {
-        this.syncStatus = new SyncStatus(new SyncStatusOptions());
-        this.closed = false;
-        this.ready = false;
-
-        this.sdkVersion = "";
-    }   
-}
\ No newline at end of file
diff --git a/src/Common/Client/Connection/IPowerSyncBackendConnector.cs b/src/Common/Client/Connection/IPowerSyncBackendConnector.cs
deleted file mode 100644
index 2a32785..0000000
--- a/src/Common/Client/Connection/IPowerSyncBackendConnector.cs
+++ /dev/null
@@ -1,23 +0,0 @@
-namespace Common.Client.Connection;
-
-public interface IPowerSyncBackendConnector
-{
-    // Allows the PowerSync client to retrieve an authentication token from your backend
-    // which is used to authenticate against the PowerSync service.
-    //  
-    // This should always fetch a fresh set of credentials - don't use cached
-    // values.
-    //  
-    // Return null if the user is not signed in. Throw an error if credentials
-    // cannot be fetched due to a network error or other temporary error.
-    //
-    // This token is kept for the duration of a sync connection.
-    Task<PowerSyncCredentials?> FetchCredentials();
-
-    // Upload local changes to the app backend.
-    //
-    // Use {@link AbstractPowerSyncDatabase.getCrudBatch} to get a batch of changes to upload.
-    //
-    // Any thrown errors will result in a retry after the configured wait period (default: 5 seconds).
-    Task UploadData(AbstractPowerSyncDatabase database);
-}
diff --git a/src/Common/Client/Connection/PowerSyncCredentials.cs b/src/Common/Client/Connection/PowerSyncCredentials.cs
deleted file mode 100644
index 4deff14..0000000
--- a/src/Common/Client/Connection/PowerSyncCredentials.cs
+++ /dev/null
@@ -1,7 +0,0 @@
-namespace Common.Client.Connection;
-public class PowerSyncCredentials(string endpoint, string token, DateTime? expiresAt = null)
-{
-    public string Endpoint { get; set; } = endpoint;
-    public string Token { get; set; } = token;
-    public DateTime? ExpiresAt { get; set; } = expiresAt;
-}
\ No newline at end of file
diff --git a/src/Common/Common.csproj b/src/Common/Common.csproj
deleted file mode 100644
index bb23fb7..0000000
--- a/src/Common/Common.csproj
+++ /dev/null
@@ -1,9 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-
-  <PropertyGroup>
-    <TargetFramework>net8.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-
-</Project>
diff --git a/src/Common/DB/Crud/SyncStatus.cs b/src/Common/DB/Crud/SyncStatus.cs
deleted file mode 100644
index d6e17eb..0000000
--- a/src/Common/DB/Crud/SyncStatus.cs
+++ /dev/null
@@ -1,57 +0,0 @@
-namespace Common.DB.Crud;
-
-using System.Text.Json;
-
-public class SyncDataFlowStatus
-{
-    public bool Downloading { get; set; } = false;
-    public bool Uploading { get; set; } = false;    
-}
-
- public class SyncStatusOptions
-{
-    public bool? Connected { get; set; }
-    public SyncDataFlowStatus? DataFlow { get; set; }
-    public DateTime? LastSyncedAt { get; set; }
-    public bool? HasSynced { get; set; }
-}
-
- public class SyncStatus(SyncStatusOptions options)
-{
-    protected SyncStatusOptions options = options;
-
-    public bool Connected => options.Connected ?? false;
-    
-    public DateTime? LastSyncedAt => options.LastSyncedAt;
-
-    public bool? HasSynced => options.HasSynced;
-
-    public SyncDataFlowStatus DataFlowStatus => options.DataFlow ?? new SyncDataFlowStatus();
-
-    public bool IsEqual(SyncStatus status)
-    {
-        return JsonSerializer.Serialize(options) == JsonSerializer.Serialize(status.options);
-    }
-
-    public string GetMessage()
-    {
-        return $"SyncStatus<connected: {Connected}, lastSyncedAt: {LastSyncedAt}, hasSynced: {HasSynced}, " +
-                $"downloading: {DataFlowStatus.Downloading}, uploading: {DataFlowStatus.Uploading}>";
-    }
-
-    public SyncStatusOptions ToJson()
-    {
-        return new SyncStatusOptions
-        {
-            Connected = Connected,
-            DataFlow = DataFlowStatus,
-            LastSyncedAt = LastSyncedAt,
-            HasSynced = HasSynced
-        };
-    }
-
-    public override string ToString()
-    {
-        return GetMessage();
-    }
-}
\ No newline at end of file
diff --git a/src/Common/DB/Crud/UploadQueueStatus.cs b/src/Common/DB/Crud/UploadQueueStatus.cs
deleted file mode 100644
index 1925af9..0000000
--- a/src/Common/DB/Crud/UploadQueueStatus.cs
+++ /dev/null
@@ -1,17 +0,0 @@
-namespace Common.DB.Crud;
-
- public class UploadQueueStats(int count, long? size = null)
-{
-    public int Count { get; set; } = count;
-
-    public long? Size { get; set; } = size;
-
-    public override string ToString()
-    {
-        if (Size == null) {
-            return $"UploadQueueStats<count: {Count}>";
-        } else {
-            return $"UploadQueueStats<count: {Count} size: {Size / 1024.0}kB>";
-        }
-    }
-}
\ No newline at end of file

From 58daca98da76e67d1013365f0a8e1583b95bda58 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Thu, 20 Mar 2025 15:35:07 +0200
Subject: [PATCH 24/26] Added DB operations to IPowerSyncDatabase interface.

---
 .../Client/PowerSyncDatabase.cs               | 28 +++++++++++++++----
 .../PowerSync.Common/Client/SQLOpenFactory.cs |  2 +-
 2 files changed, 24 insertions(+), 6 deletions(-)

diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
index 69b69f7..575e509 100644
--- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
+++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
@@ -28,14 +28,14 @@ public class BasePowerSyncDatabaseOptions()
 
 }
 
-public abstract class DatabaseSource { }
+public interface IDatabaseSource { }
 
-public class DBAdapterSource(IDBAdapter Adapter) : DatabaseSource
+public class DBAdapterSource(IDBAdapter Adapter) : IDatabaseSource
 {
     public IDBAdapter Adapter { get; init; } = Adapter;
 }
 
-public class OpenFactorySource(ISQLOpenFactory Factory) : DatabaseSource
+public class OpenFactorySource(ISQLOpenFactory Factory) : IDatabaseSource
 {
     public ISQLOpenFactory Factory { get; init; } = Factory;
 }
@@ -45,8 +45,7 @@ public class PowerSyncDatabaseOptions() : BasePowerSyncDatabaseOptions()
     /// <summary> 
     /// Source for a SQLite database connection.
     /// </summary>
-    public DatabaseSource Database { get; set; } = null!;
-
+    public IDatabaseSource Database { get; set; } = null!;
 }
 
 public class PowerSyncDBEvent : StreamingSyncImplementationEvent
@@ -63,6 +62,25 @@ public interface IPowerSyncDatabase : IEventStream<PowerSyncDBEvent>
     public Task<CrudBatch?> GetCrudBatch(int limit);
 
     public Task<CrudTransaction?> GetNextCrudTransaction();
+
+    Task<NonQueryResult> Execute(string query, object[]? parameters = null);
+
+    Task<T[]> GetAll<T>(string sql, params object[]? parameters);
+
+    Task<T?> GetOptional<T>(string sql, params object[]? parameters);
+
+    Task<T> Get<T>(string sql, params object[]? parameters);
+
+    Task<T> ReadLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null);
+
+    Task<T> ReadTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null);
+
+    Task WriteLock(Func<ILockContext, Task> fn, DBLockOptions? options = null);
+    Task<T> WriteLock<T>(Func<ILockContext, Task<T>> fn, DBLockOptions? options = null);
+
+    Task WriteTransaction(Func<ITransaction, Task> fn, DBLockOptions? options = null);
+    Task<T> WriteTransaction<T>(Func<ITransaction, Task<T>> fn, DBLockOptions? options = null);
+
 }
 
 public class PowerSyncDatabase : EventStream<PowerSyncDBEvent>, IPowerSyncDatabase
diff --git a/PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs b/PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs
index c91abdc..518906b 100644
--- a/PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs
+++ b/PowerSync/PowerSync.Common/Client/SQLOpenFactory.cs
@@ -2,7 +2,7 @@ namespace PowerSync.Common.Client;
 
 using PowerSync.Common.DB;
 
-public class SQLOpenOptions : DatabaseSource
+public class SQLOpenOptions : IDatabaseSource
 {
     /// <summary>
     /// Filename for the database.

From 3cd8cb29a91cb55bc3c59f5946b7ce3dcbe639fc Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Thu, 20 Mar 2025 15:48:44 +0200
Subject: [PATCH 25/26] Removed todos, added to internal tracker.

---
 PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs | 10 ----------
 .../PowerSync.Common/Client/Sync/Stream/Remote.cs      |  2 --
 .../Client/Sync/Stream/StreamingSyncImplementation.cs  |  1 -
 PowerSync/PowerSync.Common/DB/Schema/Table.cs          |  3 ---
 .../PowerSync.Common/MDSQLite/MDSQLiteConnection.cs    |  5 -----
 5 files changed, 21 deletions(-)

diff --git a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
index 575e509..5d28379 100644
--- a/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
+++ b/PowerSync/PowerSync.Common/Client/PowerSyncDatabase.cs
@@ -120,9 +120,6 @@ public PowerSyncDatabase(PowerSyncDatabaseOptions options)
         }
         else if (options.Database is SQLOpenOptions openOptions)
         {
-            // TODO default to MDSQLite factory for now
-            // Can be broken out, rename this class to Abstract
-            // `this.openDBAdapter(options)`
             Database = new MDSQLiteAdapter(new MDSQLiteAdapterOptions
             {
                 Name = openOptions.DbFilename,
@@ -364,7 +361,6 @@ public async Task DisconnectAndClear()
         await Disconnect();
         await WaitForReady();
 
-        // TODO CL bool clearLocal = options?.ClearLocal ?? false;
         bool clearLocal = true;
 
         await Database.WriteTransaction(async tx =>
@@ -382,12 +378,6 @@ await Database.WriteTransaction(async tx =>
         base.Close();
         await WaitForReady();
 
-        // TODO CL
-        // if (options.Disconnect)
-        // {
-        //     await Disconnect();
-        // }
-
         syncStreamImplementation?.Close();
         BucketStorageAdapter?.Close();
 
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
index d293521..8263ab4 100644
--- a/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/Remote.cs
@@ -50,7 +50,6 @@ public Remote(IPowerSyncBackendConnector connector)
 
         credentials = await connector.FetchCredentials();
 
-        // TODO CL trailing forward slash check
         return credentials;
     }
 
@@ -155,7 +154,6 @@ private async Task<HttpRequestMessage> BuildRequest(HttpMethod method, string pa
 
         if (string.IsNullOrEmpty(credentials.Token))
         {
-            // TODO CL error status code 401
             var error = new HttpRequestException("Not signed in");
             throw error;
         }
diff --git a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
index 7b331fa..f73de84 100644
--- a/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
+++ b/PowerSync/PowerSync.Common/Client/Sync/Stream/StreamingSyncImplementation.cs
@@ -131,7 +131,6 @@ public StreamingSyncImplementation(StreamingSyncImplementationOptions options)
 
         CancellationTokenSource = null;
 
-        // TODO CL throttling
         TriggerCrudUpload = () =>
         {
             if (!SyncStatus.Connected || SyncStatus.DataFlowStatus.Uploading)
diff --git a/PowerSync/PowerSync.Common/DB/Schema/Table.cs b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
index 6901dcd..b9785d2 100644
--- a/PowerSync/PowerSync.Common/DB/Schema/Table.cs
+++ b/PowerSync/PowerSync.Common/DB/Schema/Table.cs
@@ -2,9 +2,6 @@ namespace PowerSync.Common.DB.Schema;
 
 using Newtonsoft.Json;
 
-// TODO CL Need to port this to C#
-// export const InvalidSQLCharacters = /["'%,.#\s[\]]/;
-
 public class TableOptions(
     Dictionary<string, List<string>>? indexes = null,
     bool? localOnly = null,
diff --git a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
index f0cda83..16c5c30 100644
--- a/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
+++ b/PowerSync/PowerSync.Common/MDSQLite/MDSQLiteConnection.cs
@@ -142,7 +142,6 @@ public async Task<QueryResult> ExecuteQuery(string query, object[]? parameters =
             var row = new Dictionary<string, object>();
             for (int i = 0; i < reader.FieldCount; i++)
             {
-                // TODO: What should we do with null values?
                 row[reader.GetName(i)] = reader.IsDBNull(i) ? null : reader.GetValue(i);
             }
             rows.Add(row);
@@ -164,8 +163,6 @@ public async Task<T[]> GetAll<T>(string sql, object[]? parameters = null)
 
         var items = new List<T>();
 
-        // TODO: Improve mapping errors for when the result fields don't match the target type.
-        // TODO: This conversion may be a performance bottleneck, it's the easiest mechamisn for getting result typing.
         foreach (var row in result.Rows.Array)
         {
             if (row != null)
@@ -197,8 +194,6 @@ public async Task<T[]> GetAll<T>(string sql, object[]? parameters = null)
             return default;
         }
 
-        // TODO: Improve mapping errors for when the result fields don't match the target type.
-        // TODO: This conversion may be a performance bottleneck, it's the easiest mechamisn for getting result typing.
         string json = JsonConvert.SerializeObject(firstRow);
         return JsonConvert.DeserializeObject<T>(json);
     }

From 331e60fb68d61bd08f2f39a966ef8d03ae022c09 Mon Sep 17 00:00:00 2001
From: Christiaan Landman <chriz.ek@gmail.com>
Date: Mon, 24 Mar 2025 09:51:30 +0200
Subject: [PATCH 26/26] Removed commands file.

---
 commands | 14 --------------
 1 file changed, 14 deletions(-)
 delete mode 100644 commands

diff --git a/commands b/commands
deleted file mode 100644
index 6e3c741..0000000
--- a/commands
+++ /dev/null
@@ -1,14 +0,0 @@
-dotnet new sln -n root    
-dotnet new classlib -n Common    
-dotnet sln root.sln add src/Common/Common.csproj
-dotnet add reference ../../packages/CommonUtilities/CommonUtilities.csproj
-
-dotnet pack packages/CommonUtilities -o ../local-nuget
-dotnet add package CommonUtilities --version "*"
-
-dotnet new xunit -n Common.Tests
-dotnet sln ../../root.sln add Common.Tests/Common.Tests.csproj
-
-dotnet test src/Common/Common.Tests
-dotnet test --logger "console;verbosity=detailed" src/Common/Common.Tests
-dotnet test --logger "console;verbosity=detailed"
\ No newline at end of file