From: gnunet
Subject: [taler-wallet-core] 02/03: new sqlite3 backend, many DB bug fixes
Date: Fri, 20 Dec 2024 11:13:41 +0100

This is an automated email from the git hooks/post-receive script.

dold pushed a commit to branch master
in repository wallet-core.

commit 8e11ead343d7707d3a050f926f9301c01119560b
Author: Florian Dold <florian@dold.me>
AuthorDate: Wed Dec 18 01:39:34 2024 +0100

    new sqlite3 backend, many DB bug fixes
---
 Makefile                                           |   2 -
 packages/idb-bridge/check.js                       |  18 -
 packages/idb-bridge/package.json                   |   3 -
 packages/idb-bridge/src/MemoryBackend.ts           |   2 +-
 packages/idb-bridge/src/SqliteBackend.test.ts      |   4 +-
 packages/idb-bridge/src/SqliteBackend.ts           | 846 ++++++++++++---------
 packages/idb-bridge/src/backend-interface.ts       |   2 +-
 packages/idb-bridge/src/bench.ts                   |   4 +-
 packages/idb-bridge/src/bridge-idb.ts              |  67 +-
 .../src/idb-wpt-ported/idbfactory-open.test.ts     |   9 +-
 packages/idb-bridge/src/index.ts                   |  27 +-
 ...sqlite3-impl.ts => node-better-sqlite3-impl.ts} |  17 +-
 .../src/node-helper-sqlite3-impl.test.ts           |  82 ++
 .../idb-bridge/src/node-helper-sqlite3-impl.ts     | 572 ++++++++++++++
 packages/idb-bridge/src/sqlite3-interface.ts       |  14 +-
 packages/idb-bridge/src/testingdb.ts               |   8 +-
 packages/idb-bridge/taler-helper-sqlite3           | 305 ++++++++
 packages/taler-wallet-cli/Makefile                 |   3 +-
 packages/taler-wallet-core/src/host-impl.node.ts   |   4 +-
 packages/taler-wallet-core/src/host-impl.qtart.ts  |  14 +-
 20 files changed, 1563 insertions(+), 440 deletions(-)

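The updated test in packages/idb-bridge/src/SqliteBackend.test.ts (see the hunk further down) shows how the renamed better-sqlite3 implementation now plugs into the backend. A minimal sketch, assuming module-relative imports inside packages/idb-bridge/src; the openBackend helper name is illustrative and not part of the patch:

    // Sketch: obtain a Sqlite3Interface implementation backed by better-sqlite3,
    // then create the (now fully async) IndexedDB backend on top of it.
    import { createSqliteBackend } from "./SqliteBackend.js";
    import { createNodeBetterSqlite3Impl } from "./node-better-sqlite3-impl.js";

    async function openBackend(filename: string) {
      // Load the better-sqlite3-based sqlite3 implementation.
      const sqlite3Impl = await createNodeBetterSqlite3Impl();
      // All backend operations (prepare/run/getFirst) are awaited internally.
      const backend = await createSqliteBackend(sqlite3Impl, { filename });
      return backend;
    }
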
diff --git a/Makefile b/Makefile
index 58677ac5b..bd6101293 100644
--- a/Makefile
+++ b/Makefile
@@ -276,7 +276,6 @@ install:
        $(MAKE) -C packages/merchant-backoffice-ui install-nodeps
        $(MAKE) -C packages/aml-backoffice-ui install-nodeps
        $(MAKE) -C packages/auditor-backoffice-ui install-nodeps
-       node packages/idb-bridge/check.js
 
 
 .PHONY: install-tools
@@ -287,7 +286,6 @@ install-tools:
        $(MAKE) -C packages/taler-wallet-cli install-nodeps
        $(MAKE) -C packages/anastasis-cli install-nodeps
        $(MAKE) -C packages/taler-harness install-nodeps
-       node packages/idb-bridge/check.js
 
 .PHONY: check-migration
 
diff --git a/packages/idb-bridge/check.js b/packages/idb-bridge/check.js
deleted file mode 100644
index 646676271..000000000
--- a/packages/idb-bridge/check.js
+++ /dev/null
@@ -1,18 +0,0 @@
-import bsq from "better-sqlite3";
-
-// Check if we can load the native module of better-sqlite3.
-// If not, give a nice error message.
-
-try {
-  const db = bsq(":memory:");
-} catch (e) {
-  console.log(e.message);
-  console.warn()
-  console.warn("WARNING: Unable to use better-sqlite3.");
-  console.warn("Please run \n\n  pnpm rebuild --recursive better-sqlite3 
--loglevel debug\n");
-  console.warn("to rebuild the native module.");
-  console.warn()
-  console.warn("Alternatively, check\n\n  
https://nodejs.org/en/about/previous-releases\n";);
-  console.warn("for a node version compatible with the native module.");
-  process.exit(1);
-}
diff --git a/packages/idb-bridge/package.json b/packages/idb-bridge/package.json
index 17209a62f..82c059d75 100644
--- a/packages/idb-bridge/package.json
+++ b/packages/idb-bridge/package.json
@@ -19,9 +19,6 @@
   "exports": {
     ".": {
       "default": "./lib/index.js"
-    },
-    "./node-sqlite3-bindings": {
-      "default": "./lib/node-sqlite3-impl.js"
     }
   },
   "devDependencies": {
diff --git a/packages/idb-bridge/src/MemoryBackend.ts b/packages/idb-bridge/src/MemoryBackend.ts
index 526920a9f..a4e3bdb7a 100644
--- a/packages/idb-bridge/src/MemoryBackend.ts
+++ b/packages/idb-bridge/src/MemoryBackend.ts
@@ -1448,7 +1448,7 @@ export class MemoryBackend implements Backend {
     }
   }
 
-  rollback(btx: DatabaseTransaction): void {
+  async rollback(btx: DatabaseTransaction): Promise<void> {
     if (this.enableTracing) {
       console.log(`TRACING: rollback`);
     }
diff --git a/packages/idb-bridge/src/SqliteBackend.test.ts b/packages/idb-bridge/src/SqliteBackend.test.ts
index 612cb9d4b..e43b1fa8b 100644
--- a/packages/idb-bridge/src/SqliteBackend.test.ts
+++ b/packages/idb-bridge/src/SqliteBackend.test.ts
@@ -19,7 +19,7 @@ import { createSqliteBackend } from "./SqliteBackend.js";
 import { ResultLevel, StoreLevel } from "./backend-interface.js";
 import { BridgeIDBKeyRange } from "./bridge-idb.js";
 import * as fs from "node:fs";
-import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+import { createNodeBetterSqlite3Impl } from "./node-better-sqlite3-impl.js";
 
 test("sqlite3 backend", async (t) => {
   const filename = "mytestdb.sqlite3";
@@ -29,7 +29,7 @@ test("sqlite3 backend", async (t) => {
     // Do nothing.
   }
   try {
-    const sqlite3Impl = await createNodeSqlite3Impl();
+    const sqlite3Impl = await createNodeBetterSqlite3Impl();
     const backend = await createSqliteBackend(sqlite3Impl, {
       filename,
     });
diff --git a/packages/idb-bridge/src/SqliteBackend.ts b/packages/idb-bridge/src/SqliteBackend.ts
index 8213de366..cecf0794a 100644
--- a/packages/idb-bridge/src/SqliteBackend.ts
+++ b/packages/idb-bridge/src/SqliteBackend.ts
@@ -77,28 +77,43 @@ interface ConnectionInfo {
   // Database that the connection has
   // connected to.
   databaseName: string;
+
+  storeMap: Map<string, MyStoreMeta>;
+  storeList: MyStoreMeta[];
 }
 
 interface TransactionInfo {
   connectionCookie: string;
 }
 
-interface ScopeIndexInfo {
-  indexId: SqliteRowid;
+interface MyIndexMeta {
+  indexId: SqliteRowid | undefined;
   keyPath: IDBKeyPath | IDBKeyPath[];
   multiEntry: boolean;
   unique: boolean;
+
+  currentName: string | undefined;
+  nameDirty: boolean;
 }
 
-interface ScopeInfo {
+interface MyStoreMeta {
   /**
    * Internal ID of the object store.
    * Used for fast retrieval, since it's the
    * primary key / rowid of the sqlite table.
    */
-  objectStoreId: SqliteRowid;
+  objectStoreId: SqliteRowid | undefined;
+
+  keyPath: string | string[] | null;
+
+  autoIncrement: boolean;
 
-  indexMap: Map<string, ScopeIndexInfo>;
+  indexList: MyIndexMeta[];
+  indexMap: Map<string, MyIndexMeta>;
+
+  currentName: string | undefined;
+
+  nameDirty: boolean;
 }
 
 interface IndexIterPos {
@@ -204,7 +219,7 @@ export function expectDbNumber(
   assertDbInvariant(typeof resultRow === "object" && resultRow != null);
   const res = (resultRow as any)[name];
   if (typeof res !== "number") {
-    throw Error("unexpected type from database");
+    throw Error("unexpected type from database (expected number)");
   }
   return res;
 }
@@ -213,7 +228,7 @@ export function expectDbString(resultRow: unknown, name: string): string {
   assertDbInvariant(typeof resultRow === "object" && resultRow != null);
   const res = (resultRow as any)[name];
   if (typeof res !== "string") {
-    throw Error("unexpected type from database");
+    throw Error("unexpected type from database (expected string)");
   }
   return res;
 }
@@ -228,7 +243,7 @@ export function expectDbStringOrNull(
     return null;
   }
   if (typeof res !== "string") {
-    throw Error("unexpected type from database");
+    throw Error("unexpected type from database (expected string or null)");
   }
   return res;
 }
@@ -255,16 +270,8 @@ export class SqliteBackend implements Backend {
    */
   private transactionDoneCond: AsyncCondition = new AsyncCondition();
 
-  /**
-   * Is the connection blocked because either an open request
-   * or delete request is being processed?
-   */
-  private connectionBlocked: boolean = false;
-
   private txLevel: TransactionLevel = TransactionLevel.None;
 
-  private txScope: Map<string, ScopeInfo> = new Map();
-
   private connectionMap: Map<string, ConnectionInfo> = new Map();
 
   private transactionMap: Map<string, TransactionInfo> = new Map();
@@ -278,12 +285,12 @@ export class SqliteBackend implements Backend {
     public db: Sqlite3Database,
   ) {}
 
-  private _prep(sql: string): Sqlite3Statement {
+  private async _prep(sql: string): Promise<Sqlite3Statement> {
     const stmt = this.sqlPrepCache.get(sql);
     if (stmt) {
       return stmt;
     }
-    const newStmt = this.db.prepare(sql);
+    const newStmt = await this.db.prepare(sql);
     this.sqlPrepCache.set(sql, newStmt);
     return newStmt;
   }
@@ -303,7 +310,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Read) {
       throw Error("only allowed in read transaction");
     }
-    const scopeInfo = this.txScope.get(req.objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(req.objectStoreName);
     if (!scopeInfo) {
       throw Error("object store not in scope");
     }
@@ -319,6 +326,8 @@ export class SqliteBackend implements Backend {
       }
     }
 
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+
     if (this.enableTracing) {
       console.log(
        `querying index os=${req.objectStoreName}, idx=${req.indexName}, direction=${req.direction}`,
@@ -331,7 +340,7 @@ export class SqliteBackend implements Backend {
     const queryUnique =
       req.direction === "nextunique" || req.direction === "prevunique";
 
-    const indexId = indexInfo.indexId;
+    const indexId = await this._provideIndex(connInfo, scopeInfo, indexInfo);
     const indexUnique = indexInfo.unique;
 
     let numResults = 0;
@@ -345,7 +354,7 @@ export class SqliteBackend implements Backend {
 
     const backendThis = this;
 
-    function packResult() {
+    async function packResult() {
       if (req.resultLevel > ResultLevel.OnlyCount) {
         for (let i = 0; i < encPrimaryKeys.length; i++) {
           primaryKeys.push(deserializeKey(encPrimaryKeys[i]));
@@ -355,8 +364,8 @@ export class SqliteBackend implements Backend {
         }
         if (req.resultLevel === ResultLevel.Full) {
           for (let i = 0; i < encPrimaryKeys.length; i++) {
-            const val = backendThis._getObjectValue(
-              scopeInfo!.objectStoreId,
+            const val = await backendThis._getObjectValue(
+              objectStoreId,
               encPrimaryKeys[i],
             );
             if (!val) {
@@ -390,7 +399,7 @@ export class SqliteBackend implements Backend {
       };
     }
 
-    let currentPos = this._startIndex({
+    let currentPos = await this._startIndex({
       indexId,
       indexUnique,
       queryUnique,
@@ -412,7 +421,7 @@ export class SqliteBackend implements Backend {
       const advancePrimaryKey = req.advancePrimaryKey
         ? serializeKey(req.advancePrimaryKey)
         : undefined;
-      currentPos = this._continueIndex({
+      currentPos = await this._continueIndex({
         indexId,
         indexUnique,
         queryUnique,
@@ -437,7 +446,7 @@ export class SqliteBackend implements Backend {
       const lastObjectPosition = req.lastObjectStorePosition
         ? serializeKey(req.lastObjectStorePosition)
         : undefined;
-      currentPos = this._continueIndex({
+      currentPos = await this._continueIndex({
         indexId,
         indexUnique,
         queryUnique,
@@ -468,7 +477,7 @@ export class SqliteBackend implements Backend {
       if (targetKeyObj != null) {
         const targetKey = serializeKey(targetKeyObj);
        const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen;
-        currentPos = this._continueIndex({
+        currentPos = await this._continueIndex({
           indexId,
           indexUnique,
           queryUnique,
@@ -515,7 +524,7 @@ export class SqliteBackend implements Backend {
         encIndexKeys.push(currentPos.indexPos);
       }
 
-      currentPos = backendThis._continueIndex({
+      currentPos = await backendThis._continueIndex({
         indexId,
         indexUnique,
         forward,
@@ -534,7 +543,7 @@ export class SqliteBackend implements Backend {
   // in the direction specified by "forward".
   // Do nothing if the current position is already past the
   // target position.
-  _continueIndex(req: {
+  async _continueIndex(req: {
     indexId: SqliteRowid;
     indexUnique: boolean;
     queryUnique: boolean;
@@ -543,7 +552,7 @@ export class SqliteBackend implements Backend {
     currentPos: IndexIterPos | null | undefined;
     targetIndexKey: Uint8Array;
     targetObjectKey?: Uint8Array;
-  }): IndexIterPos | undefined {
+  }): Promise<IndexIterPos | undefined> {
     const currentPos = req.currentPos;
     const forward = req.forward;
     const dir = forward ? 1 : -1;
@@ -580,50 +589,52 @@ export class SqliteBackend implements Backend {
     if (req.indexUnique) {
       if (req.forward) {
         if (req.inclusive) {
-          stmt = this._prep(sqlUniqueIndexDataContinueForwardInclusive);
+          stmt = await this._prep(sqlUniqueIndexDataContinueForwardInclusive);
         } else {
-          stmt = this._prep(sqlUniqueIndexDataContinueForwardStrict);
+          stmt = await this._prep(sqlUniqueIndexDataContinueForwardStrict);
         }
       } else {
         if (req.inclusive) {
-          stmt = this._prep(sqlUniqueIndexDataContinueBackwardInclusive);
+          stmt = await this._prep(sqlUniqueIndexDataContinueBackwardInclusive);
         } else {
-          stmt = this._prep(sqlUniqueIndexDataContinueBackwardStrict);
+          stmt = await this._prep(sqlUniqueIndexDataContinueBackwardStrict);
         }
       }
     } else {
       if (req.forward) {
         if (req.queryUnique || req.targetObjectKey == null) {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueForwardInclusiveUnique);
+            stmt = await this._prep(sqlIndexDataContinueForwardInclusiveUnique);
           } else {
-            stmt = this._prep(sqlIndexDataContinueForwardStrictUnique);
+            stmt = await this._prep(sqlIndexDataContinueForwardStrictUnique);
           }
         } else {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueForwardInclusive);
+            stmt = await this._prep(sqlIndexDataContinueForwardInclusive);
           } else {
-            stmt = this._prep(sqlIndexDataContinueForwardStrict);
+            stmt = await this._prep(sqlIndexDataContinueForwardStrict);
           }
         }
       } else {
         if (req.queryUnique || req.targetObjectKey == null) {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueBackwardInclusiveUnique);
+            stmt = await this._prep(
+              sqlIndexDataContinueBackwardInclusiveUnique,
+            );
           } else {
-            stmt = this._prep(sqlIndexDataContinueBackwardStrictUnique);
+            stmt = await this._prep(sqlIndexDataContinueBackwardStrictUnique);
           }
         } else {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueBackwardInclusive);
+            stmt = await this._prep(sqlIndexDataContinueBackwardInclusive);
           } else {
-            stmt = this._prep(sqlIndexDataContinueBackwardStrict);
+            stmt = await this._prep(sqlIndexDataContinueBackwardStrict);
           }
         }
       }
     }
 
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       index_id: req.indexId,
       index_key: req.targetIndexKey,
       object_key: req.targetObjectKey,
@@ -653,32 +664,32 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  _startIndex(req: {
+  async _startIndex(req: {
     indexId: SqliteRowid;
     indexUnique: boolean;
     queryUnique: boolean;
     forward: boolean;
-  }): IndexIterPos | undefined {
+  }): Promise<IndexIterPos | undefined> {
     let stmt: Sqlite3Statement;
     if (req.indexUnique) {
       if (req.forward) {
-        stmt = this._prep(sqlUniqueIndexDataStartForward);
+        stmt = await this._prep(sqlUniqueIndexDataStartForward);
       } else {
-        stmt = this._prep(sqlUniqueIndexDataStartBackward);
+        stmt = await this._prep(sqlUniqueIndexDataStartBackward);
       }
     } else {
       if (req.forward) {
-        stmt = this._prep(sqlIndexDataStartForward);
+        stmt = await this._prep(sqlIndexDataStartForward);
       } else {
         if (req.queryUnique) {
-          stmt = this._prep(sqlIndexDataStartBackwardUnique);
+          stmt = await this._prep(sqlIndexDataStartBackwardUnique);
         } else {
-          stmt = this._prep(sqlIndexDataStartBackward);
+          stmt = await this._prep(sqlIndexDataStartBackward);
         }
       }
     }
 
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       index_id: req.indexId,
     });
 
@@ -715,7 +726,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Read) {
       throw Error("only allowed in read transaction");
     }
-    const scopeInfo = this.txScope.get(req.objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(req.objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -727,12 +738,14 @@ export class SqliteBackend implements Backend {
     const forward: boolean =
       req.direction === "next" || req.direction === "nextunique";
 
-    let currentKey = this._startObjectKey(scopeInfo.objectStoreId, forward);
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+
+    let currentKey = await this._startObjectKey(objectStoreId, forward);
 
     if (req.advancePrimaryKey != null) {
       const targetKey = serializeKey(req.advancePrimaryKey);
-      currentKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currentKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         forward,
         inclusive: true,
         currentKey,
@@ -742,8 +755,8 @@ export class SqliteBackend implements Backend {
 
     if (req.lastObjectStorePosition != null) {
       const targetKey = serializeKey(req.lastObjectStorePosition);
-      currentKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currentKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         forward,
         inclusive: false,
         currentKey,
@@ -756,8 +769,8 @@ export class SqliteBackend implements Backend {
       if (targetKeyObj != null) {
         const targetKey = serializeKey(targetKeyObj);
        const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen;
-        currentKey = this._continueObjectKey({
-          objectStoreId: scopeInfo.objectStoreId,
+        currentKey = await this._continueObjectKey({
+          objectStoreId: objectStoreId,
           forward,
           inclusive,
           currentKey,
@@ -790,8 +803,8 @@ export class SqliteBackend implements Backend {
         encPrimaryKeys.push(currentKey);
       }
 
-      currentKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currentKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         forward,
         inclusive: false,
         currentKey: null,
@@ -805,8 +818,8 @@ export class SqliteBackend implements Backend {
       }
       if (req.resultLevel === ResultLevel.Full) {
         for (let i = 0; i < encPrimaryKeys.length; i++) {
-          const val = this._getObjectValue(
-            scopeInfo.objectStoreId,
+          const val = await this._getObjectValue(
+            objectStoreId,
             encPrimaryKeys[i],
           );
           if (!val) {
@@ -834,17 +847,17 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  _startObjectKey(
+  async _startObjectKey(
     objectStoreId: number | bigint,
     forward: boolean,
-  ): Uint8Array | null {
+  ): Promise<Uint8Array | null> {
     let stmt: Sqlite3Statement;
     if (forward) {
-      stmt = this._prep(sqlObjectDataStartForward);
+      stmt = await this._prep(sqlObjectDataStartForward);
     } else {
-      stmt = this._prep(sqlObjectDataStartBackward);
+      stmt = await this._prep(sqlObjectDataStartBackward);
     }
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       object_store_id: objectStoreId,
     });
     if (!res) {
@@ -862,13 +875,13 @@ export class SqliteBackend implements Backend {
 
   // Result *must* be past targetKey in the direction
   // specified by "forward".
-  _continueObjectKey(req: {
+  async _continueObjectKey(req: {
     objectStoreId: number | bigint;
     forward: boolean;
     currentKey: Uint8Array | null;
     targetKey: Uint8Array;
     inclusive: boolean;
-  }): Uint8Array | null {
+  }): Promise<Uint8Array | null> {
     const { forward, currentKey, targetKey } = req;
     const dir = forward ? 1 : -1;
     if (currentKey) {
@@ -885,19 +898,19 @@ export class SqliteBackend implements Backend {
 
     if (req.inclusive) {
       if (req.forward) {
-        stmt = this._prep(sqlObjectDataContinueForwardInclusive);
+        stmt = await this._prep(sqlObjectDataContinueForwardInclusive);
       } else {
-        stmt = this._prep(sqlObjectDataContinueBackwardInclusive);
+        stmt = await this._prep(sqlObjectDataContinueBackwardInclusive);
       }
     } else {
       if (req.forward) {
-        stmt = this._prep(sqlObjectDataContinueForward);
+        stmt = await this._prep(sqlObjectDataContinueForward);
       } else {
-        stmt = this._prep(sqlObjectDataContinueBackward);
+        stmt = await this._prep(sqlObjectDataContinueBackward);
       }
     }
 
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       object_store_id: req.objectStoreId,
       x: req.targetKey,
     });
@@ -916,12 +929,12 @@ export class SqliteBackend implements Backend {
     return rkey;
   }
 
-  _getObjectValue(
+  async _getObjectValue(
     objectStoreId: number | bigint,
     key: Uint8Array,
-  ): string | undefined {
-    const stmt = this._prep(sqlObjectDataValueFromKey);
-    const res = stmt.getFirst({
+  ): Promise<string | undefined> {
+    const stmt = await this._prep(sqlObjectDataValueFromKey);
+    const res = await stmt.getFirst({
       object_store_id: objectStoreId,
       key: key,
     });
@@ -943,30 +956,14 @@ export class SqliteBackend implements Backend {
     if (!connInfo) {
       throw Error("connection not found");
     }
-    const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
-      name: objectStoreName,
-      database_name: connInfo.databaseName,
-    });
-    if (!objRes) {
-      throw Error("object store not found");
-    }
-    const objectStoreId = expectDbNumber(objRes, "id");
-    const keyPath = deserializeKeyPath(
-      expectDbStringOrNull(objRes, "key_path"),
-    );
-    const autoInc = expectDbNumber(objRes, "auto_increment");
-    const indexSet: string[] = [];
-    const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
-      object_store_id: objectStoreId,
-    });
-    for (const idxInfo of indexRes) {
-      const indexName = expectDbString(idxInfo, "name");
-      indexSet.push(indexName);
+    const storeMeta = connInfo.storeMap.get(objectStoreName);
+    if (!storeMeta) {
+      return undefined;
     }
     return {
-      keyPath,
-      autoIncrement: autoInc != 0,
-      indexSet,
+      keyPath: storeMeta.keyPath,
+      autoIncrement: storeMeta.autoIncrement,
+      indexSet: [...storeMeta.indexMap.keys()],
     };
   }
 
@@ -980,40 +977,23 @@ export class SqliteBackend implements Backend {
     if (!connInfo) {
       throw Error("connection not found");
     }
-    const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
-      name: objectStoreName,
-      database_name: connInfo.databaseName,
-    });
-    if (!objRes) {
+    const storeMeta = connInfo.storeMap.get(objectStoreName);
+    if (!storeMeta) {
       throw Error("object store not found");
     }
-    const objectStoreId = expectDbNumber(objRes, "id");
-    const idxInfo = this._prep(sqlGetIndexByName).getFirst({
-      object_store_id: objectStoreId,
-      name: indexName,
-    });
-    if (!idxInfo) {
-      throw Error(
-        `index ${indexName} on object store ${objectStoreName} not found`,
-      );
-    }
-    const indexUnique = expectDbNumber(idxInfo, "unique_index");
-    const indexMultiEntry = expectDbNumber(idxInfo, "multientry");
-    const indexKeyPath = deserializeKeyPath(
-      expectDbString(idxInfo, "key_path"),
-    );
-    if (!indexKeyPath) {
-      throw Error("db inconsistent");
+    const indexMeta = storeMeta.indexMap.get(indexName);
+    if (!indexMeta) {
+      return undefined;
     }
     return {
-      keyPath: indexKeyPath,
-      multiEntry: indexMultiEntry != 0,
-      unique: indexUnique != 0,
+      keyPath: indexMeta.keyPath,
+      multiEntry: indexMeta.multiEntry,
+      unique: indexMeta.unique,
     };
   }
 
   async getDatabases(): Promise<BridgeIDBDatabaseInfo[]> {
-    const dbList = this._prep(sqlListDatabases).getAll();
+    const dbList = await (await this._prep(sqlListDatabases)).getAll();
     let res: BridgeIDBDatabaseInfo[] = [];
     for (const r of dbList) {
       res.push({
@@ -1025,9 +1005,10 @@ export class SqliteBackend implements Backend {
     return res;
   }
 
-  private _loadObjectStoreNames(databaseName: string): string[] {
+  private async _loadObjectStoreNames(databaseName: string): Promise<string[]> {
     const objectStoreNames: string[] = [];
-    const storesRes = this._prep(sqlGetObjectStoresByDatabase).getAll({
+    const stmt = await this._prep(sqlGetObjectStoresByDatabase);
+    const storesRes = await stmt.getAll({
       database_name: databaseName,
     });
     for (const res of storesRes) {
@@ -1040,38 +1021,77 @@ export class SqliteBackend implements Backend {
     return objectStoreNames;
   }
 
+  async _runSqlBegin(): Promise<void> {
+    const stmt = await this._prep(sqlBegin);
+    await stmt.run();
+  }
+
+  async _runSqlCommit(): Promise<void> {
+    const stmt = await this._prep(sqlCommit);
+    await stmt.run();
+  }
+
+  async _runSqlGetDatabaseVersion(
+    databaseName: string,
+  ): Promise<number | undefined> {
+    const versionRes = await (await this._prep(sqlGetDatabaseVersion)).getFirst({
+      name: databaseName,
+    });
+    if (versionRes == undefined) {
+      return undefined;
+    }
+    const verNum = expectDbNumber(versionRes, "version");
+    assertDbInvariant(typeof verNum === "number");
+    return verNum;
+  }
+
+  async _runSqlCreateDatabase(databaseName: string): Promise<void> {
+    const stmt = await this._prep(sqlCreateDatabase);
+    await stmt.run({ name: databaseName });
+  }
+
   async connectDatabase(databaseName: string): Promise<ConnectResult> {
     const connectionId = this.connectionIdCounter++;
     const connectionCookie = `connection-${connectionId}`;
 
     // Wait until no transaction is active anymore.
     while (1) {
+      if (this.enableTracing) {
+        console.log(`connectDatabase - txLevel is ${this.txLevel}`);
+      }
       if (this.txLevel == TransactionLevel.None) {
         break;
       }
       await this.transactionDoneCond.wait();
     }
 
-    this._prep(sqlBegin).run();
-    const versionRes = this._prep(sqlGetDatabaseVersion).getFirst({
-      name: databaseName,
-    });
-    let ver: number;
-    if (versionRes == undefined) {
-      this._prep(sqlCreateDatabase).run({ name: databaseName });
+    this.txLevel = TransactionLevel.Write;
+
+    await this._runSqlBegin();
+    let ver = await this._runSqlGetDatabaseVersion(databaseName);
+    if (ver == null) {
+      await this._runSqlCreateDatabase(databaseName);
       ver = 0;
-    } else {
-      const verNum = expectDbNumber(versionRes, "version");
-      assertDbInvariant(typeof verNum === "number");
-      ver = verNum;
     }
-    const objectStoreNames: string[] = this._loadObjectStoreNames(databaseName);
 
-    this._prep(sqlCommit).run();
+    const objectStoreNames: string[] =
+      await this._loadObjectStoreNames(databaseName);
+    await this._runSqlCommit();
 
-    this.connectionMap.set(connectionCookie, {
+    const connInfo = {
       databaseName: databaseName,
-    });
+      storeList: [],
+      storeMap: new Map(),
+    };
+
+    this.connectionMap.set(connectionCookie, connInfo);
+
+    for (const storeName of objectStoreNames) {
+      await this._loadScopeInfo(connInfo, storeName);
+    }
+
+    this.txLevel = TransactionLevel.None;
+    this.transactionDoneCond.trigger();
 
     return {
       conn: {
@@ -1082,8 +1102,11 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  private _loadScopeInfo(connInfo: ConnectionInfo, storeName: string): void {
-    const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+  private async _loadScopeInfo(
+    connInfo: ConnectionInfo,
+    storeName: string,
+  ): Promise<void> {
+    const objRes = await (await this._prep(sqlGetObjectStoreMetaByName)).getFirst({
       name: storeName,
       database_name: connInfo.databaseName,
     });
@@ -1091,13 +1114,18 @@ export class SqliteBackend implements Backend {
       throw Error("object store not found");
     }
     const objectStoreId = expectDbNumber(objRes, "id");
-    const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
+    const objectStoreAutoIncrement = expectDbNumber(objRes, "auto_increment");
+    const objectStoreKeyPath = deserializeKeyPath(
+      expectDbStringOrNull(objRes, "key_path"),
+    );
+    const indexRes = await (await this._prep(sqlGetIndexesByObjectStoreId)).getAll({
       object_store_id: objectStoreId,
     });
     if (!indexRes) {
       throw Error("db inconsistent");
     }
-    const indexMap = new Map<string, ScopeIndexInfo>();
+    const indexList: MyIndexMeta[] = [];
+    const indexMap = new Map<string, MyIndexMeta>();
     for (const idxInfo of indexRes) {
       const indexId = expectDbNumber(idxInfo, "id");
       const indexName = expectDbString(idxInfo, "name");
@@ -1109,17 +1137,28 @@ export class SqliteBackend implements Backend {
       if (!indexKeyPath) {
         throw Error("db inconsistent");
       }
-      indexMap.set(indexName, {
+      const indexMeta: MyIndexMeta = {
         indexId,
         keyPath: indexKeyPath,
         multiEntry: indexMultiEntry != 0,
         unique: indexUnique != 0,
-      });
+        currentName: indexName,
+        nameDirty: false,
+      };
+      indexList.push(indexMeta);
+      indexMap.set(indexName, indexMeta);
     }
-    this.txScope.set(storeName, {
+    const storeMeta: MyStoreMeta = {
       objectStoreId,
       indexMap,
-    });
+      indexList,
+      autoIncrement: objectStoreAutoIncrement != 0,
+      keyPath: objectStoreKeyPath,
+      currentName: storeName,
+      nameDirty: false,
+    };
+    connInfo.storeList.push(storeMeta);
+    connInfo.storeMap.set(storeName, storeMeta);
   }
 
   async beginTransaction(
@@ -1148,29 +1187,20 @@ export class SqliteBackend implements Backend {
       }
     }
 
-    this._prep(sqlBegin).run();
     if (mode === "readonly") {
       this.txLevel = TransactionLevel.Read;
     } else if (mode === "readwrite") {
       this.txLevel = TransactionLevel.Write;
+    } else {
+      throw Error("not supported");
     }
 
+    await this._runSqlBegin();
+
     this.transactionMap.set(transactionCookie, {
       connectionCookie: conn.connectionCookie,
     });
 
-    // FIXME: We should check this
-    // if (this.txScope.size != 0) {
-    //   // Something didn't clean up!
-    //   throw Error("scope not empty");
-    // }
-    this.txScope.clear();
-
-    // FIXME: Use cached info from connection?
-    for (const storeName of objectStores) {
-      this._loadScopeInfo(connInfo, storeName);
-    }
-
     return {
       transactionCookie,
     };
@@ -1198,41 +1228,34 @@ export class SqliteBackend implements Backend {
       await this.transactionDoneCond.wait();
     }
 
-    // FIXME: We should check this
-    // if (this.txScope.size != 0) {
-    //   // Something didn't clean up!
-    //   throw Error("scope not empty");
-    // }
-    this.txScope.clear();
-
     if (this.enableTracing) {
       console.log(`version change transaction unblocked`);
     }
 
-    this._prep(sqlBegin).run();
     this.txLevel = TransactionLevel.VersionChange;
-
     this.transactionMap.set(transactionCookie, {
       connectionCookie: conn.connectionCookie,
     });
 
-    this._prep(sqlUpdateDbVersion).run({
-      name: connInfo.databaseName,
-      version: newVersion,
-    });
-
-    const objectStoreNames = this._loadObjectStoreNames(connInfo.databaseName);
-
-    // FIXME: Use cached info from connection?
-    for (const storeName of objectStoreNames) {
-      this._loadScopeInfo(connInfo, storeName);
-    }
+    await this._runSqlBegin();
+    await this._runSqlUpdateDbVersion(connInfo.databaseName, newVersion);
 
     return {
       transactionCookie,
     };
   }
 
+  async _runSqlUpdateDbVersion(
+    databaseName: string,
+    newVersion: number,
+  ): Promise<void> {
+    const stmt = await this._prep(sqlUpdateDbVersion);
+    await stmt.run({
+      name: databaseName,
+      version: newVersion,
+    });
+  }
+
   async deleteDatabase(databaseName: string): Promise<void> {
     // FIXME: Wait until connection queue is not blocked
     // FIXME: To properly implement the spec semantics, maybe
@@ -1242,10 +1265,14 @@ export class SqliteBackend implements Backend {
       await this.transactionDoneCond.wait();
     }
 
-    this._prep(sqlBegin).run();
-    const objectStoreNames = this._loadObjectStoreNames(databaseName);
+    this.txLevel = TransactionLevel.VersionChange;
+
+    await this._runSqlBegin();
+
+    const objectStoreNames = await this._loadObjectStoreNames(databaseName);
+
     for (const storeName of objectStoreNames) {
-      const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+      const objRes = await (await this._prep(sqlGetObjectStoreMetaByName)).getFirst({
         name: storeName,
         database_name: databaseName,
       });
@@ -1253,13 +1280,13 @@ export class SqliteBackend implements Backend {
         throw Error("object store not found");
       }
       const objectStoreId = expectDbNumber(objRes, "id");
-      const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
+      const indexRes = await (await this._prep(sqlGetIndexesByObjectStoreId)).getAll({
         object_store_id: objectStoreId,
       });
       if (!indexRes) {
         throw Error("db inconsistent");
       }
-      const indexMap = new Map<string, ScopeIndexInfo>();
+      const indexList: MyIndexMeta[] = [];
       for (const idxInfo of indexRes) {
         const indexId = expectDbNumber(idxInfo, "id");
         const indexName = expectDbString(idxInfo, "name");
@@ -1271,43 +1298,45 @@ export class SqliteBackend implements Backend {
         if (!indexKeyPath) {
           throw Error("db inconsistent");
         }
-        indexMap.set(indexName, {
+        const indexMeta: MyIndexMeta = {
           indexId,
           keyPath: indexKeyPath,
           multiEntry: indexMultiEntry != 0,
           unique: indexUnique != 0,
-        });
+          currentName: indexName,
+          nameDirty: false,
+        };
+        indexList.push(indexMeta);
       }
-      this.txScope.set(storeName, {
-        objectStoreId,
-        indexMap,
-      });
 
-      for (const indexInfo of indexMap.values()) {
+      for (const indexInfo of indexList) {
         let stmt: Sqlite3Statement;
         if (indexInfo.unique) {
-          stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
+          stmt = await this._prep(sqlIUniqueIndexDataDeleteAll);
         } else {
-          stmt = this._prep(sqlIndexDataDeleteAll);
+          stmt = await this._prep(sqlIndexDataDeleteAll);
         }
-        stmt.run({
+        await stmt.run({
           index_id: indexInfo.indexId,
         });
-        this._prep(sqlIndexDelete).run({
+        await (await this._prep(sqlIndexDelete)).run({
           index_id: indexInfo.indexId,
         });
       }
-      this._prep(sqlObjectDataDeleteAll).run({
+      await (await this._prep(sqlObjectDataDeleteAll)).run({
         object_store_id: objectStoreId,
       });
-      this._prep(sqlObjectStoreDelete).run({
+      await (await this._prep(sqlObjectStoreDelete)).run({
         object_store_id: objectStoreId,
       });
     }
-    this._prep(sqlDeleteDatabase).run({
+    await (await this._prep(sqlDeleteDatabase)).run({
       name: databaseName,
     });
-    this._prep(sqlCommit).run();
+    await (await this._prep(sqlCommit)).run();
+
+    this.txLevel = TransactionLevel.None;
+    this.transactionDoneCond.trigger();
   }
 
   async close(db: DatabaseConnection): Promise<void> {
@@ -1315,10 +1344,13 @@ export class SqliteBackend implements Backend {
     if (!connInfo) {
       throw Error("connection not found");
     }
-    // FIXME: What if we're in a transaction? Does the backend interface allow this?
-    // if (this.txLevel !== TransactionLevel.None) {
-    //   throw Error("can't close while in transaction");
-    // }
+    // Wait until no transaction is active anymore.
+    while (1) {
+      if (this.txLevel == TransactionLevel.None) {
+        break;
+      }
+      await this.transactionDoneCond.wait();
+    }
     if (this.enableTracing) {
       console.log(`closing connection ${db.connectionCookie}`);
     }
@@ -1342,16 +1374,14 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(oldName);
-    if (!scopeInfo) {
+    const storeMeta = connInfo.storeMap.get(oldName);
+    if (!storeMeta) {
       throw Error("object store not found");
     }
-    this.txScope.delete(oldName);
-    this.txScope.set(newName, scopeInfo);
-    this._prep(sqlRenameObjectStore).run({
-      object_store_id: scopeInfo.objectStoreId,
-      name: newName,
-    });
+    connInfo.storeMap.delete(oldName);
+    connInfo.storeMap.set(newName, storeMeta);
+    storeMeta.currentName = newName;
+    storeMeta.nameDirty = true;
   }
 
   renameIndex(
@@ -1369,7 +1399,7 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error("object store not found");
     }
@@ -1380,9 +1410,30 @@ export class SqliteBackend implements Backend {
     // FIXME: Would also be much nicer with numeric UID handles
     scopeInfo.indexMap.delete(oldIndexName);
     scopeInfo.indexMap.set(newIndexName, indexInfo);
-    this._prep(sqlRenameIndex).run({
-      index_id: indexInfo.indexId,
-      name: newIndexName,
+    scopeInfo.nameDirty = true;
+    scopeInfo.currentName = newIndexName;
+  }
+
+  async _doDeleteObjectStore(scopeInfo: MyStoreMeta): Promise<void> {
+    for (const indexInfo of scopeInfo.indexMap.values()) {
+      let stmt: Sqlite3Statement;
+      if (indexInfo.unique) {
+        stmt = await this._prep(sqlIUniqueIndexDataDeleteAll);
+      } else {
+        stmt = await this._prep(sqlIndexDataDeleteAll);
+      }
+      await stmt.run({
+        index_id: indexInfo.indexId,
+      });
+      await (await this._prep(sqlIndexDelete)).run({
+        index_id: indexInfo.indexId,
+      });
+    }
+    await (await this._prep(sqlObjectDataDeleteAll)).run({
+      object_store_id: scopeInfo.objectStoreId,
+    });
+    await (await this._prep(sqlObjectStoreDelete)).run({
+      object_store_id: scopeInfo.objectStoreId,
     });
   }
 
@@ -1396,31 +1447,31 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(name);
+    const scopeInfo = connInfo.storeMap.get(name);
     if (!scopeInfo) {
       throw Error("object store not found");
     }
-    for (const indexInfo of scopeInfo.indexMap.values()) {
-      let stmt: Sqlite3Statement;
-      if (indexInfo.unique) {
-        stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
-      } else {
-        stmt = this._prep(sqlIndexDataDeleteAll);
-      }
-      stmt.run({
-        index_id: indexInfo.indexId,
-      });
-      this._prep(sqlIndexDelete).run({
-        index_id: indexInfo.indexId,
-      });
+    const storeMeta = connInfo.storeMap.get(name);
+    if (!storeMeta) {
+      throw Error("object store does not exist");
     }
-    this._prep(sqlObjectDataDeleteAll).run({
-      object_store_id: scopeInfo.objectStoreId,
+    connInfo.storeMap.delete(name);
+    storeMeta.currentName = undefined;
+  }
+
+  async _doDeleteIndex(indexMeta: MyIndexMeta): Promise<void> {
+    let stmt: Sqlite3Statement;
+    if (indexMeta.unique) {
+      stmt = await this._prep(sqlIUniqueIndexDataDeleteAll);
+    } else {
+      stmt = await this._prep(sqlIndexDataDeleteAll);
+    }
+    await stmt.run({
+      index_id: indexMeta.indexId,
     });
-    this._prep(sqlObjectStoreDelete).run({
-      object_store_id: scopeInfo.objectStoreId,
+    await (await this._prep(sqlIndexDelete)).run({
+      index_id: indexMeta.indexId,
     });
-    this.txScope.delete(name);
   }
 
   deleteIndex(
@@ -1437,27 +1488,16 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(objectStoreName);
-    if (!scopeInfo) {
+    const storeMeta = connInfo.storeMap.get(objectStoreName);
+    if (!storeMeta) {
       throw Error("object store not found");
     }
-    const indexInfo = scopeInfo.indexMap.get(indexName);
+    const indexInfo = storeMeta.indexMap.get(indexName);
     if (!indexInfo) {
       throw Error("index not found");
     }
-    scopeInfo.indexMap.delete(indexName);
-    let stmt: Sqlite3Statement;
-    if (indexInfo.unique) {
-      stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
-    } else {
-      stmt = this._prep(sqlIndexDataDeleteAll);
-    }
-    stmt.run({
-      index_id: indexInfo.indexId,
-    });
-    this._prep(sqlIndexDelete).run({
-      index_id: indexInfo.indexId,
-    });
+    storeMeta.indexMap.delete(indexName);
+    indexInfo.currentName = undefined;
   }
 
   async rollback(btx: DatabaseTransaction): Promise<void> {
@@ -1468,13 +1508,24 @@ export class SqliteBackend implements Backend {
     if (this.enableTracing) {
       console.log(`rolling back transaction ${btx.transactionCookie}`);
     }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("not connected");
+    }
     if (this.txLevel === TransactionLevel.None) {
       return;
     }
-    this._prep(sqlRollback).run();
+    await (await this._prep(sqlRollback)).run();
+    connInfo.storeList = [];
+    connInfo.storeMap.clear();
+    const objectStoreNames: string[] = await this._loadObjectStoreNames(
+      connInfo.databaseName,
+    );
+    for (const storeName of objectStoreNames) {
+      await this._loadScopeInfo(connInfo, storeName);
+    }
     this.txLevel = TransactionLevel.None;
     this.transactionMap.delete(btx.transactionCookie);
-    this.txScope.clear();
     this.transactionDoneCond.trigger();
   }
 
@@ -1483,19 +1534,88 @@ export class SqliteBackend implements Backend {
     if (!txInfo) {
       throw Error("transaction not found");
     }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("not connected");
+    }
     if (this.enableTracing) {
       console.log(`committing transaction ${btx.transactionCookie}`);
     }
     if (this.txLevel === TransactionLevel.None) {
       return;
     }
-    this._prep(sqlCommit).run();
+    if (this.txLevel === TransactionLevel.VersionChange) {
+      for (const store of connInfo.storeList) {
+        if (store.currentName == null) {
+          await this._doDeleteObjectStore(store);
+          continue;
+        }
+        if (store.objectStoreId == null) {
+          await this._provideObjectStore(connInfo, store);
+        }
+        if (store.nameDirty) {
+          await (await this._prep(sqlRenameObjectStore)).run({
+            object_store_id: store.objectStoreId,
+            name: store.currentName,
+          });
+        }
+        for (const indexMeta of store.indexList) {
+          if (indexMeta.currentName == null) {
+            await this._doDeleteIndex(indexMeta);
+            continue;
+          }
+          if (indexMeta.indexId == null) {
+            await this._provideIndex(connInfo, store, indexMeta);
+          }
+          if (indexMeta.nameDirty) {
+            await (await this._prep(sqlRenameIndex)).run({
+              index_id: indexMeta.indexId,
+              name: indexMeta.currentName,
+            });
+            indexMeta.nameDirty = false;
+          }
+        }
+      }
+    }
+    await (await this._prep(sqlCommit)).run();
     this.txLevel = TransactionLevel.None;
-    this.txScope.clear();
     this.transactionMap.delete(btx.transactionCookie);
     this.transactionDoneCond.trigger();
   }
 
+  async _provideObjectStore(
+    connInfo: ConnectionInfo,
+    storeMeta: MyStoreMeta,
+  ): Promise<SqliteRowid> {
+    if (storeMeta.objectStoreId != null) {
+      return storeMeta.objectStoreId;
+    }
+    if (!storeMeta.currentName) {
+      throw Error("invalid state");
+    }
+
+    const runRes = await (await this._prep(sqlCreateObjectStore)).run({
+      name: storeMeta.currentName,
+      key_path: serializeKeyPath(storeMeta.keyPath),
+      auto_increment: storeMeta.autoIncrement ? 1 : 0,
+      database_name: connInfo.databaseName,
+    });
+
+    storeMeta.objectStoreId = runRes.lastInsertRowid;
+
+    for (const indexMeta of storeMeta.indexList) {
+      if (indexMeta.currentName == null) {
+        continue;
+      }
+      if (indexMeta.indexId != null) {
+        throw Error("invariant violated");
+      }
+      await this._provideIndex(connInfo, storeMeta, indexMeta);
+    }
+
+    return runRes.lastInsertRowid;
+  }
+
   createObjectStore(
     btx: DatabaseTransaction,
     name: string,
@@ -1513,20 +1633,70 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.VersionChange) {
       throw Error("only allowed in versionchange transaction");
     }
-    if (this.txScope.has(name)) {
+    if (connInfo.storeMap.has(name)) {
       throw Error("object store already exists");
     }
-    let myKeyPath = serializeKeyPath(keyPath);
-    const runRes = this._prep(sqlCreateObjectStore).run({
-      name,
-      key_path: myKeyPath,
-      auto_increment: autoIncrement ? 1 : 0,
-      database_name: connInfo.databaseName,
-    });
-    this.txScope.set(name, {
-      objectStoreId: runRes.lastInsertRowid,
+    const storeMeta: MyStoreMeta = {
+      objectStoreId: undefined,
       indexMap: new Map(),
+      indexList: [],
+      // Normalize
+      keyPath: deserializeKeyPath(serializeKeyPath(keyPath)),
+      autoIncrement: autoIncrement,
+      currentName: name,
+      nameDirty: false,
+    };
+    connInfo.storeList.push(storeMeta);
+    connInfo.storeMap.set(name, storeMeta);
+  }
+
+  async _provideIndex(
+    connInfo: ConnectionInfo,
+    storeMeta: MyStoreMeta,
+    indexMeta: MyIndexMeta,
+  ) {
+    if (indexMeta.indexId != null) {
+      return indexMeta.indexId;
+    }
+    if (storeMeta.objectStoreId == null) {
+      throw Error("invariant failed");
+    }
+    const res = await (await this._prep(sqlCreateIndex)).run({
+      object_store_id: storeMeta.objectStoreId,
+      name: indexMeta.currentName,
+      key_path: serializeKeyPath(indexMeta.keyPath),
+      unique: indexMeta.unique ? 1 : 0,
+      multientry: indexMeta.multiEntry ? 1 : 0,
     });
+    indexMeta.indexId = res.lastInsertRowid;
+    // FIXME: We can't use an iterator here, as it's not allowed to
+    // execute a write statement while the iterator executes.
+    // Maybe do multiple selects instead of loading everything into memory?
+    const keyRowsRes = await (await this._prep(sqlObjectDataGetAll)).getAll({
+      object_store_id: storeMeta.objectStoreId,
+    });
+
+    for (const keyRow of keyRowsRes) {
+      assertDbInvariant(typeof keyRow === "object" && keyRow != null);
+      assertDbInvariant("key" in keyRow);
+      assertDbInvariant("value" in keyRow);
+      assertDbInvariant(typeof keyRow.value === "string");
+      const key = keyRow.key;
+      const value = structuredRevive(JSON.parse(keyRow.value));
+      assertDbInvariant(key instanceof Uint8Array);
+      try {
+        await this.insertIntoIndex(indexMeta, key, value);
+      } catch (e) {
+        // FIXME: Catch this in insertIntoIndex!
+        if (e instanceof DataError) {
+          // https://www.w3.org/TR/IndexedDB-2/#object-store-storage-operation
+          // Do nothing
+        } else {
+          throw e;
+        }
+      }
+    }
+    return res.lastInsertRowid;
   }
 
   createIndex(
@@ -1548,7 +1718,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.VersionChange) {
       throw Error("only allowed in versionchange transaction");
     }
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error("object store does not exist, can't create index");
     }
@@ -1560,48 +1730,16 @@ export class SqliteBackend implements Backend {
       console.log(`creating index "${indexName}"`);
     }
 
-    const res = this._prep(sqlCreateIndex).run({
-      object_store_id: scopeInfo.objectStoreId,
-      name: indexName,
-      key_path: serializeKeyPath(keyPath),
-      unique: unique ? 1 : 0,
-      multientry: multiEntry ? 1 : 0,
-    });
-    const scopeIndexInfo: ScopeIndexInfo = {
-      indexId: res.lastInsertRowid,
+    const scopeIndexInfo: MyIndexMeta = {
+      indexId: undefined,
       keyPath,
       multiEntry,
       unique,
+      currentName: indexName,
+      nameDirty: false,
     };
+    scopeInfo.indexList.push(scopeIndexInfo);
     scopeInfo.indexMap.set(indexName, scopeIndexInfo);
-
-    // FIXME: We can't use an iterator here, as it's not allowed to
-    // execute a write statement while the iterator executes.
-    // Maybe do multiple selects instead of loading everything into memory?
-    const keyRowsRes = this._prep(sqlObjectDataGetAll).getAll({
-      object_store_id: scopeInfo.objectStoreId,
-    });
-
-    for (const keyRow of keyRowsRes) {
-      assertDbInvariant(typeof keyRow === "object" && keyRow != null);
-      assertDbInvariant("key" in keyRow);
-      assertDbInvariant("value" in keyRow);
-      assertDbInvariant(typeof keyRow.value === "string");
-      const key = keyRow.key;
-      const value = structuredRevive(JSON.parse(keyRow.value));
-      assertDbInvariant(key instanceof Uint8Array);
-      try {
-        this.insertIntoIndex(scopeIndexInfo, key, value);
-      } catch (e) {
-        // FIXME: Catch this in insertIntoIndex!
-        if (e instanceof DataError) {
-          // https://www.w3.org/TR/IndexedDB-2/#object-store-storage-operation
-          // Do nothing
-        } else {
-          throw e;
-        }
-      }
-    }
   }
 
   async deleteRecord(
@@ -1620,7 +1758,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Write) {
       throw Error("store operation only allowed while running a transaction");
     }
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -1629,6 +1767,8 @@ export class SqliteBackend implements Backend {
       );
     }
 
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+
     // PERF: We delete keys one-by-one here.
     // Instead, we could do it with a single
     // delete query for the object data / index data.
@@ -1637,15 +1777,15 @@ export class SqliteBackend implements Backend {
 
     if (range.lower != null) {
       const targetKey = serializeKey(range.lower);
-      currKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         currentKey: null,
         forward: true,
         inclusive: true,
         targetKey,
       });
     } else {
-      currKey = this._startObjectKey(scopeInfo.objectStoreId, true);
+      currKey = await this._startObjectKey(objectStoreId, true);
     }
 
     let upperBound: Uint8Array | undefined;
@@ -1672,7 +1812,7 @@ export class SqliteBackend implements Backend {
 
       // Now delete!
 
-      this._prep(sqlObjectDataDeleteKey).run({
+      await (await this._prep(sqlObjectDataDeleteKey)).run({
         object_store_id: scopeInfo.objectStoreId,
         key: currKey,
       });
@@ -1680,18 +1820,18 @@ export class SqliteBackend implements Backend {
       for (const index of scopeInfo.indexMap.values()) {
         let stmt: Sqlite3Statement;
         if (index.unique) {
-          stmt = this._prep(sqlUniqueIndexDataDeleteKey);
+          stmt = await this._prep(sqlUniqueIndexDataDeleteKey);
         } else {
-          stmt = this._prep(sqlIndexDataDeleteKey);
+          stmt = await this._prep(sqlIndexDataDeleteKey);
         }
-        stmt.run({
+        await stmt.run({
           index_id: index.indexId,
           object_key: currKey,
         });
       }
 
-      currKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         currentKey: null,
         forward: true,
         inclusive: false,
@@ -1715,7 +1855,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Write) {
       throw Error("store operation only allowed while running a transaction");
     }
-    const scopeInfo = this.txScope.get(storeReq.objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(storeReq.objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -1723,8 +1863,9 @@ export class SqliteBackend implements Backend {
         )} not in transaction scope`,
       );
     }
-    const metaRes = this._prep(sqlGetObjectStoreMetaById).getFirst({
-      id: scopeInfo.objectStoreId,
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+    const metaRes = await (await this._prep(sqlGetObjectStoreMetaById)).getFirst({
+      id: objectStoreId satisfies SqliteRowid,
     });
     if (metaRes === undefined) {
       throw Error(
@@ -1776,8 +1917,8 @@ export class SqliteBackend implements Backend {
 
     const serializedObjectKey = serializeKey(key);
 
-    const existingObj = this._getObjectValue(
-      scopeInfo.objectStoreId,
+    const existingObj = await this._getObjectValue(
+      objectStoreId,
       serializedObjectKey,
     );
 
@@ -1787,30 +1928,31 @@ export class SqliteBackend implements Backend {
       }
     }
 
-    this._prep(sqlInsertObjectData).run({
-      object_store_id: scopeInfo.objectStoreId,
+    await (await this._prep(sqlInsertObjectData)).run({
+      object_store_id: objectStoreId,
       key: serializedObjectKey,
       value: JSON.stringify(structuredEncapsulate(value)),
     });
 
     if (autoIncrement != 0) {
-      this._prep(sqlUpdateAutoIncrement).run({
-        object_store_id: scopeInfo.objectStoreId,
+      await (await this._prep(sqlUpdateAutoIncrement)).run({
+        object_store_id: objectStoreId,
         auto_increment: updatedKeyGenerator,
       });
     }
 
     for (const [k, indexInfo] of scopeInfo.indexMap.entries()) {
+      const indexId = await this._provideIndex(connInfo, scopeInfo, indexInfo);
       if (existingObj) {
-        this.deleteFromIndex(
-          indexInfo.indexId,
+        await this.deleteFromIndex(
+          indexId,
           indexInfo.unique,
           serializedObjectKey,
         );
       }
 
       try {
-        this.insertIntoIndex(indexInfo, serializedObjectKey, value);
+        await this.insertIntoIndex(indexInfo, serializedObjectKey, value);
       } catch (e) {
         // FIXME: handle this in insertIntoIndex!
         if (e instanceof DataError) {
@@ -1831,28 +1973,28 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  private deleteFromIndex(
+  private async deleteFromIndex(
     indexId: SqliteRowid,
     indexUnique: boolean,
     objectKey: Uint8Array,
-  ): void {
+  ): Promise<void> {
     let stmt: Sqlite3Statement;
     if (indexUnique) {
-      stmt = this._prep(sqlUniqueIndexDataDeleteKey);
+      stmt = await this._prep(sqlUniqueIndexDataDeleteKey);
     } else {
-      stmt = this._prep(sqlIndexDataDeleteKey);
+      stmt = await this._prep(sqlIndexDataDeleteKey);
     }
-    stmt.run({
+    await stmt.run({
       index_id: indexId,
       object_key: objectKey,
     });
   }
 
-  private insertIntoIndex(
-    indexInfo: ScopeIndexInfo,
+  private async insertIntoIndex(
+    indexInfo: MyIndexMeta,
     primaryKey: Uint8Array,
     value: any,
-  ): void {
+  ): Promise<void> {
     const indexKeys = getIndexKeys(
       value,
       indexInfo.keyPath,
@@ -1864,16 +2006,16 @@ export class SqliteBackend implements Backend {
 
     let stmt;
     if (indexInfo.unique) {
-      stmt = this._prep(sqlInsertUniqueIndexData);
+      stmt = await this._prep(sqlInsertUniqueIndexData);
     } else {
-      stmt = this._prep(sqlInsertIndexData);
+      stmt = await this._prep(sqlInsertIndexData);
     }
 
     for (const indexKey of indexKeys) {
       // FIXME: Re-throw correct error for unique index violations
       const serializedIndexKey = serializeKey(indexKey);
       try {
-        stmt.run({
+        await stmt.run({
           index_id: indexInfo.indexId,
           object_key: primaryKey,
           index_key: serializedIndexKey,
@@ -1902,7 +2044,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Write) {
       throw Error("store operation only allowed while running a transaction");
     }
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -1911,32 +2053,33 @@ export class SqliteBackend implements Backend {
       );
     }
 
-    this._prep(sqlClearObjectStore).run({
+    await (await this._prep(sqlClearObjectStore)).run({
       object_store_id: scopeInfo.objectStoreId,
     });
 
     for (const index of scopeInfo.indexMap.values()) {
       let stmt: Sqlite3Statement;
       if (index.unique) {
-        stmt = this._prep(sqlClearUniqueIndexData);
+        stmt = await this._prep(sqlClearUniqueIndexData);
       } else {
-        stmt = this._prep(sqlClearIndexData);
+        stmt = await this._prep(sqlClearIndexData);
       }
-      stmt.run({
+      await stmt.run({
         index_id: index.indexId,
       });
     }
   }
 
   async backupToFile(path: string): Promise<void> {
-    const stmt = this._prep("VACUUM INTO $filename;");
-    stmt.run({
+    const stmt = await this._prep("VACUUM INTO $filename;");
+    await stmt.run({
       filename: path,
     });
   }
 }
 
 const schemaSql = `
+BEGIN;
 CREATE TABLE IF NOT EXISTS databases
 ( name TEXT PRIMARY KEY
 , version INTEGER NOT NULL
@@ -1987,6 +2130,7 @@ CREATE TABLE IF NOT EXISTS unique_index_data
 , FOREIGN KEY (index_id)
     REFERENCES indexes(id)
 );
+COMMIT;
 `;
 
 const sqlClearObjectStore = `
@@ -2329,8 +2473,8 @@ export async function createSqliteBackend(
   sqliteImpl: Sqlite3Interface,
   options: SqliteBackendOptions,
 ): Promise<SqliteBackend> {
-  const db = sqliteImpl.open(options.filename);
-  db.exec("PRAGMA foreign_keys = ON;");
-  db.exec(schemaSql);
+  const db = await sqliteImpl.open(options.filename);
+  await db.exec("PRAGMA foreign_keys = ON;");
+  await db.exec(schemaSql);
   return new SqliteBackend(sqliteImpl, db);
 }
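
Since the sqlite3 interface is now fully asynchronous, every database access in SqliteBackend.ts goes through the two-step idiom `await (await this._prep(sql)).run(params)` seen throughout the hunks above. Purely as an illustration (not part of the commit), the recurring pattern can be captured in a small helper; the name `runPrepared` is hypothetical and only relies on the types introduced in sqlite3-interface.ts below:

import type {
  BindParams,
  RunResult,
  Sqlite3Database,
} from "./sqlite3-interface.js";

// Hypothetical helper, not in the commit: prepare a statement and run it,
// mirroring the `await (await this._prep(sql)).run(params)` pattern above.
async function runPrepared(
  db: Sqlite3Database,
  sql: string,
  params?: BindParams,
): Promise<RunResult> {
  const stmt = await db.prepare(sql); // prepare() now returns a Promise
  return stmt.run(params); // run() is asynchronous as well
}
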
diff --git a/packages/idb-bridge/src/backend-interface.ts b/packages/idb-bridge/src/backend-interface.ts
index 690f92f54..201e1aea7 100644
--- a/packages/idb-bridge/src/backend-interface.ts
+++ b/packages/idb-bridge/src/backend-interface.ts
@@ -213,7 +213,7 @@ export interface Backend {
     indexName: string,
   ): void;
 
-  rollback(btx: DatabaseTransaction): void;
+  rollback(btx: DatabaseTransaction): Promise<void>;
 
   // FIXME: Should probably not be async
   commit(btx: DatabaseTransaction): Promise<void>;
diff --git a/packages/idb-bridge/src/bench.ts b/packages/idb-bridge/src/bench.ts
index d196bacb1..9eb0c43d7 100644
--- a/packages/idb-bridge/src/bench.ts
+++ b/packages/idb-bridge/src/bench.ts
@@ -22,7 +22,7 @@ import {
   BridgeIDBTransaction,
   createSqliteBackend,
 } from "./index.js";
-import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+import { createNodeBetterSqlite3Impl } from "./node-better-sqlite3-impl.js";
 
 function openDb(idbFactory: BridgeIDBFactory): Promise<BridgeIDBDatabase> {
   return new Promise((resolve, reject) => {
@@ -82,7 +82,7 @@ async function main() {
 
   console.log(`doing ${nTx} iterations of ${nInsert} items`);
 
-  const sqlite3Impl = await createNodeSqlite3Impl();
+  const sqlite3Impl = await createNodeBetterSqlite3Impl();
   const backend = await createSqliteBackend(sqlite3Impl, {
     filename,
   });
diff --git a/packages/idb-bridge/src/bridge-idb.ts b/packages/idb-bridge/src/bridge-idb.ts
index 15c68c733..6b710ea71 100644
--- a/packages/idb-bridge/src/bridge-idb.ts
+++ b/packages/idb-bridge/src/bridge-idb.ts
@@ -622,7 +622,10 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
 
     if (transactionsComplete) {
       this._closed = true;
-      this._backend.close(this._backendConnection);
+      this._backend.close(this._backendConnection).catch((e) => {
+        console.error("Error while closing connection.");
+        console.error(e);
+      });
     } else {
       queueTask(() => {
         this._closeConnection();
@@ -871,9 +874,16 @@ export class BridgeIDBFactory {
       throw new TypeError();
     }
 
+    if (BridgeIDBFactory.enableTracing) {
+      console.log(`opening DB ${name} at version ${version}`);
+    }
+
     const request = new BridgeIDBOpenDBRequest();
 
     queueTask(async () => {
+      if (BridgeIDBFactory.enableTracing) {
+        console.log(`running task to open DB ${name} at version ${version}`);
+      }
       let dbConnRes: ConnectResult;
       try {
         if (BridgeIDBFactory.enableTracing) {
@@ -901,13 +911,25 @@ export class BridgeIDBFactory {
 
       const requestedVersion = version;
 
+      if (BridgeIDBFactory.enableTracing) {
+        console.log(`existing version of DB ${name} is ${existingVersion}, requesting ${requestedVersion}`);
+      }
+
       BridgeIDBFactory.enableTracing &&
         console.log(
          `TRACE: existing version ${existingVersion}, requested version ${requestedVersion}`,
         );
 
       if (existingVersion > requestedVersion) {
-        request._finishWithError(new VersionError());
+        this.backend.close(dbConnRes.conn).catch((e) => {
+          console.error("failed to close database");
+          console.error(e);
+        });
+        request._finishWithError(
+          new VersionError(
+            `requested version ${requestedVersion}, existing version is ${existingVersion}`,
+          ),
+        );
         return;
       }
 
@@ -935,7 +957,7 @@ export class BridgeIDBFactory {
           if (otherConn._name != db._name) {
             continue;
           }
-          if (otherConn._closePending) {
+          if (otherConn._closePending || otherConn._closed) {
             continue;
           }
           if (BridgeIDBFactory.enableTracing) {
@@ -950,7 +972,7 @@ export class BridgeIDBFactory {
           otherConn.dispatchEvent(event);
         }
 
-        if (this._anyOpen()) {
+        if (this._anyOpen(name)) {
           if (BridgeIDBFactory.enableTracing) {
             console.log(
               "other connections are still open, dispatching 'blocked' event 
to other connection",
@@ -1057,8 +1079,19 @@ export class BridgeIDBFactory {
     return "[object IDBFactory]";
   }
 
-  private _anyOpen(): boolean {
-    return this.connections.some((c) => !c._closed && !c._closePending);
+  private _anyOpen(dbName: string): boolean {
+    let numOpen = 0;
+    for (const conn of this.connections) {
+      if (conn._name == dbName && !conn._closed && !conn._closePending) {
+        numOpen++;
+      }
+    }
+    if (numOpen > 0) {
+      if (BridgeIDBFactory.enableTracing) {
+        console.log(`there are ${numOpen} connections still open to ${dbName}`);
+      }
+    }
+    return numOpen > 0;
   }
 }
 
@@ -1164,7 +1197,7 @@ export class BridgeIDBIndex implements IDBIndex {
     indexSet.splice(indexIdx, 1);
   }
 
-  _abort() {
+  _abort(): void {
     if (this._originalName != null) {
       this._applyNameChange(this._name, this._originalName);
     }
@@ -2374,15 +2407,6 @@ export class BridgeIDBRequest extends FakeEventTarget implements IDBRequest {
 
     this.dispatchEvent(event);
   }
-
-  _finishWithResult(result: any) {
-    this.result = result;
-    this.readyState = "done";
-
-    const event = new FakeEvent("success");
-    event.eventPath = [];
-    this.dispatchEvent(event);
-  }
 }
 
 export class BridgeIDBOpenDBRequest
@@ -2541,7 +2565,7 @@ export class BridgeIDBTransaction
   }
 
  // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#dfn-steps-for-aborting-a-transaction
-  async _abort(errName: string | null) {
+  _abort(errName: string | null): void {
     if (BridgeIDBFactory.enableTracing) {
       console.log("TRACE: aborting transaction");
     }
@@ -2589,7 +2613,9 @@ export class BridgeIDBTransaction
 
     const maybeBtx = this._backendTransaction;
     if (maybeBtx) {
-      this._backend.rollback(maybeBtx);
+      this._backend.rollback(maybeBtx).catch((e) => {
+        console.warn(`error during rollback: ${e}`);
+      });
     }
 
     // "Any object stores and indexes which were created during the
@@ -2744,7 +2770,10 @@ export class BridgeIDBTransaction
           // Probably there is a more elegant way to do this by aborting the
           // beginTransaction call when the transaction was aborted.
           // That would require changing the DB backend API.
-          this._backend.rollback(newBackendTx);
+          this._backend.rollback(newBackendTx).catch((e) => {
+            console.error("error during rollback");
+            console.error(e);
+          });
         } else {
           this._backendTransaction = newBackendTx;
         }
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
index b8046fc1b..8782d4891 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
@@ -140,6 +140,7 @@ test("WPT idbfactory-open7.htm", async (t) => {
 
     open_rq.onupgradeneeded = function () {};
     open_rq.onsuccess = function (e: any) {
+      console.log("open7 - opening higher version DB");
       var db = e.target.result;
       db.close();
 
@@ -152,8 +153,12 @@ test("WPT idbfactory-open7.htm", async (t) => {
     };
 
     function open_current_db(e: any) {
-      var open_rq3 = indexedDB.open(e.target.result.name);
+      console.log("open7 - opening current DB");
+      const name = e.target.result.name;
+      console.log(`open7 - name is ${name}`)
+      var open_rq3 = indexedDB.open(name);
       open_rq3.onsuccess = function (e: any) {
+        console.log("open7 - success opening current DB");
         t.deepEqual(e.target.result.version, 14, "db.version");
         open_rq2.result.close();
         open_rq3.result.close();
@@ -278,6 +283,8 @@ test("WPT idbfactory-open9.htm", async (t) => {
   await should_work(1.5, 1);
  await should_work(Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER); // 0x20000000000000 - 1
   await should_work(undefined, 1);
+
+  console.error("test at end");
 });
 
 // IDBFactory.open() - error in version change transaction aborts open
diff --git a/packages/idb-bridge/src/index.ts b/packages/idb-bridge/src/index.ts
index 47ff80119..18be5a49f 100644
--- a/packages/idb-bridge/src/index.ts
+++ b/packages/idb-bridge/src/index.ts
@@ -32,8 +32,8 @@ import {
 } from "./MemoryBackend.js";
 import { Listener } from "./util/FakeEventTarget.js";
 
-export * from "./SqliteBackend.js";
 export * from "./sqlite3-interface.js";
+export * from "./SqliteBackend.js";
 
 export * from "./idbtypes.js";
 export { MemoryBackend } from "./MemoryBackend.js";
@@ -49,25 +49,25 @@ export {
   BridgeIDBOpenDBRequest,
   BridgeIDBRequest,
   BridgeIDBTransaction,
-  StoreLevel,
   ResultLevel,
+  StoreLevel,
 };
 export type {
-  DatabaseTransaction,
-  RecordGetResponse,
   Backend,
-  DatabaseList,
-  RecordStoreRequest,
-  RecordStoreResponse,
   DatabaseConnection,
-  RequestObj,
   DatabaseDump,
-  ObjectStoreDump,
-  IndexRecord,
-  ObjectStoreRecord,
-  MemoryBackendDump,
+  DatabaseList,
+  DatabaseTransaction,
   Event,
+  IndexRecord,
   Listener,
+  MemoryBackendDump,
+  ObjectStoreDump,
+  ObjectStoreRecord,
+  RecordGetResponse,
+  RecordStoreRequest,
+  RecordStoreResponse,
+  RequestObj,
 };
 
 // globalThis polyfill, see https://mathiasbynens.be/notes/globalthis
@@ -119,3 +119,6 @@ export function shimIndexedDB(factory: BridgeIDBFactory): void {
   g.IDBTransaction = BridgeIDBTransaction;
   g.IDBVersionChangeEvent = BridgeIDBVersionChangeEvent;
 }
+
+export { createNodeBetterSqlite3Impl } from "./node-better-sqlite3-impl.js";
+export { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
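
Putting the exported pieces together, the backend is wired into the IndexedDB shim roughly as follows. This is a sketch assuming the exports listed above and the setup used in testingdb.ts and host-impl.node.ts further down; in particular, the BridgeIDBFactory constructor call is an assumption, not verbatim commit content:

import {
  BridgeIDBFactory,
  createNodeHelperSqlite3Impl,
  createSqliteBackend,
  shimIndexedDB,
} from "@gnu-taler/idb-bridge";

async function setupIndexedDB(): Promise<void> {
  // Talks to the taler-helper-sqlite3 child process.
  const sqlite3Impl = await createNodeHelperSqlite3Impl();
  const backend = await createSqliteBackend(sqlite3Impl, {
    filename: ":memory:", // or a file path derived from the storage location
  });
  // Assumed constructor shape; see host-impl.node.ts for the actual wiring.
  const factory = new BridgeIDBFactory(backend);
  shimIndexedDB(factory); // installs indexedDB and friends on globalThis
}
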
diff --git a/packages/idb-bridge/src/node-sqlite3-impl.ts b/packages/idb-bridge/src/node-better-sqlite3-impl.ts
similarity index 84%
rename from packages/idb-bridge/src/node-sqlite3-impl.ts
rename to packages/idb-bridge/src/node-better-sqlite3-impl.ts
index fa38d298f..41c1d74d3 100644
--- a/packages/idb-bridge/src/node-sqlite3-impl.ts
+++ b/packages/idb-bridge/src/node-better-sqlite3-impl.ts
@@ -18,27 +18,28 @@
 import type Database from "better-sqlite3";
 import {
   ResultRow,
+  Sqlite3Database,
   Sqlite3Interface,
   Sqlite3Statement,
 } from "./sqlite3-interface.js";
 
-export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
+export async function createNodeBetterSqlite3Impl(): Promise<Sqlite3Interface> {
   // @ts-ignore: optional dependency
   const bsq = (await import("better-sqlite3")).default;
 
   return {
-    open(filename: string) {
+    async open(filename: string): Promise<Sqlite3Database> {
       const internalDbHandle = bsq(filename);
       return {
         internalDbHandle,
-        close() {
+        async close() {
           internalDbHandle.close();
         },
-        prepare(stmtStr): Sqlite3Statement {
+        async prepare(stmtStr): Promise<Sqlite3Statement> {
           const stmtHandle = internalDbHandle.prepare(stmtStr);
           return {
             internalStatement: stmtHandle,
-            getAll(params): ResultRow[] {
+            async getAll(params): Promise<ResultRow[]> {
               let res: ResultRow[];
               if (params === undefined) {
                 res = stmtHandle.all() as ResultRow[];
@@ -47,7 +48,7 @@ export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
               }
               return res;
             },
-            getFirst(params): ResultRow | undefined {
+            async getFirst(params): Promise<ResultRow | undefined> {
               let res: ResultRow | undefined;
               if (params === undefined) {
                 res = stmtHandle.get() as ResultRow | undefined;
@@ -56,7 +57,7 @@ export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
               }
               return res;
             },
-            run(params) {
+            async run(params) {
               const myParams = [];
               if (params !== undefined) {
                 myParams.push(params);
@@ -75,7 +76,7 @@ export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
             },
           };
         },
-        exec(sqlStr): void {
+        async exec(sqlStr: string): Promise<void> {
           internalDbHandle.exec(sqlStr);
         },
       };
diff --git a/packages/idb-bridge/src/node-helper-sqlite3-impl.test.ts b/packages/idb-bridge/src/node-helper-sqlite3-impl.test.ts
new file mode 100644
index 000000000..c1cdca3a3
--- /dev/null
+++ b/packages/idb-bridge/src/node-helper-sqlite3-impl.test.ts
@@ -0,0 +1,82 @@
+/*
+ Copyright 2019 Florian Dold
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ or implied. See the License for the specific language governing
+ permissions and limitations under the License.
+ */
+
+import test from "ava";
+import * as fs from "node:fs";
+import { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
+
+test("sqlite3 helper", async (t) => {
+  const filename = "mytestdb.sqlite3";
+  try {
+    fs.unlinkSync(filename);
+  } catch (e) {
+    // Do nothing.
+  }
+  const impl = await createNodeHelperSqlite3Impl();
+
+  const db = await impl.open(filename);
+
+  await db.exec(`CREATE TABLE person(firstname, lastname, age);
+    CREATE TABLE book(title, author, published);
+    CREATE TABLE publisher(name, address);`);
+
+  const stmt1 = await db.prepare(
+    "INSERT INTO book (title, author, published) VALUES ($title, $author, 
$published)",
+  );
+
+  const stmt2 = await db.prepare(
+    "SELECT author, title, NULL as foobar, 42, published FROM book WHERE 
published=$published ORDER BY author",
+  );
+
+  const stmtBegin = await db.prepare("BEGIN");
+
+  const stmtCommit = await db.prepare("COMMIT");
+
+  await stmtBegin.run();
+
+  const r1 = await stmt1.run({
+    title: "foo",
+    author: "bar",
+    published: 1995,
+  });
+
+  const r2 = await stmt1.run({
+    title: "foo2",
+    author: "bar2",
+    published: 1998,
+  });
+
+  await stmt1.run({
+    title: "foo4",
+    author: "bar4",
+    published: 1995,
+  });
+
+  t.deepEqual(r1.lastInsertRowid, 1n);
+  t.deepEqual(r2.lastInsertRowid, 2n);
+
+  const r3 = await stmtCommit.run();
+
+  const getRes1 = await stmt2.getAll({
+    published: 1995,
+  });
+
+  t.deepEqual(getRes1.length, 2);
+  t.deepEqual(getRes1[0].title, "foo");
+  t.deepEqual(getRes1[1].title, "foo4");
+
+  t.pass();
+});
diff --git a/packages/idb-bridge/src/node-helper-sqlite3-impl.ts b/packages/idb-bridge/src/node-helper-sqlite3-impl.ts
new file mode 100644
index 000000000..3ba37576b
--- /dev/null
+++ b/packages/idb-bridge/src/node-helper-sqlite3-impl.ts
@@ -0,0 +1,572 @@
+/*
+ This file is part of GNU Taler
+ (C) 2024 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+ */
+
+import stream from "node:stream";
+import {
+  BindParams,
+  ResultRow,
+  RunResult,
+  Sqlite3Database,
+  Sqlite3Interface,
+  Sqlite3Statement,
+} from "./sqlite3-interface.js";
+
+import child_process, { ChildProcessByStdio } from "node:child_process";
+import { openPromise } from "./util/openPromise.js";
+
+enum HelperCmd {
+  HELLO = 1,
+  SHUTDOWN = 2,
+  OPEN = 3,
+  CLOSE = 4,
+  PREPARE = 5,
+  STMT_GET_ALL = 6,
+  STMT_GET_FIRST = 7,
+  STMT_RUN = 8,
+  EXEC = 9,
+}
+
+enum HelperResp {
+  OK = 1,
+  FAIL = 2,
+  ROWLIST = 3,
+  RUNRESULT = 4,
+  STMT = 5,
+}
+
+function concatArr(as: Uint8Array[]): Uint8Array {
+  let len = 0;
+  for (const a of as) {
+    len += a.length;
+  }
+  const b = new Uint8Array(len);
+  let pos = 0;
+  for (const a of as) {
+    b.set(a, pos);
+    pos += a.length;
+  }
+  return b;
+}
+
+interface ReqInfo {
+  resolve: (x: Uint8Array) => void;
+}
+
+class Helper {
+  private reqCounter = 0;
+  private reqMap: Map<number, ReqInfo> = new Map();
+  private inChunks: Uint8Array[] = [];
+  private inSize: number = 0;
+  private expectSize: number = 0;
+  private enableTracing: boolean;
+  private isListening: boolean = false;
+  public proc: ChildProcessByStdio<stream.Writable, stream.Readable, null>;
+
+  constructor(opts?: { enableTracing: boolean }) {
+    this.enableTracing = opts?.enableTracing ?? false;
+    this.proc = child_process.spawn("taler-helper-sqlite3", {
+      stdio: ["pipe", "pipe", "inherit"],
+    });
+    // Make sure that the process is not blocking the parent process
+    // from exiting.
+    // When we are actively waiting for a response, we ref it again.
+    this.unrefProc();
+  }
+
+  private unrefProc() {
+    this.proc.unref();
+    try {
+      // @ts-ignore
+      this.proc.stdout.unref();
+    } catch (e) {
+      // Do nothing.
+    }
+  }
+
+  private refProc() {
+    this.proc.ref();
+    try {
+      // @ts-ignore
+      this.proc.stdout.ref();
+    } catch (e) {
+      // Do nothing.
+    }
+  }
+
+  startListening() {
+    if (this.isListening) {
+      console.error("Warning: Already listening");
+      return;
+    }
+    if (this.enableTracing) {
+      console.error("starting listening for data");
+    }
+    this.refProc();
+    this.proc.stdout.on("data", (chunk: Uint8Array) => {
+      if (this.enableTracing) {
+        console.error(`received chunk of size ${chunk.length} from helper`);
+      }
+      this.inChunks.push(chunk);
+      this.inSize += chunk.length;
+
+      while (true) {
+        if (this.expectSize === 0) {
+          if (this.inSize >= 4) {
+            const data = concatArr(this.inChunks);
+            const dv = new DataView(data.buffer);
+            const len = dv.getUint32(0);
+            this.expectSize = len;
+            continue;
+          }
+        }
+
+        if (this.expectSize > 0 && this.inSize >= this.expectSize) {
+          const data = concatArr(this.inChunks);
+          const packet = data.slice(0, this.expectSize);
+          const rest = data.slice(this.expectSize);
+          this.inSize = this.inSize - packet.length;
+          this.inChunks = [rest];
+          this.expectSize = 0;
+          this.processResponse(packet);
+          continue;
+        }
+
+        break;
+      }
+    });
+    this.isListening = true;
+  }
+
+  processResponse(packet: Uint8Array): void {
+    const dv = new DataView(packet.buffer);
+    const reqId = dv.getUint32(4);
+    if (this.enableTracing) {
+      console.error(
+        `processing complete response packet to ${reqId} from helper`,
+      );
+    }
+    const ri = this.reqMap.get(reqId);
+    if (!ri) {
+      console.error(`no request for response with ID ${reqId}`);
+      return;
+    }
+    this.reqMap.delete(reqId);
+    ri.resolve(packet.slice(8));
+  }
+
+  async communicate(cmd: number, payload: Uint8Array): Promise<Uint8Array> {
+    if (!this.isListening) {
+      this.startListening();
+    }
+
+    const prom = openPromise<Uint8Array>();
+    const reqNum = ++this.reqCounter;
+    this.reqMap.set(reqNum, {
+      resolve: prom.resolve,
+    });
+    // len, reqId, reqType, payload
+    const bufLen = 4 + 4 + 1 + payload.length;
+    const buf = new Uint8Array(bufLen);
+    const dv = new DataView(buf.buffer);
+    dv.setUint32(0, bufLen);
+    dv.setUint32(4, reqNum);
+    dv.setUint8(8, cmd);
+    buf.set(payload, 9);
+
+    await new Promise<void>((resolve, reject) => {
+      if (this.enableTracing) {
+        console.error(`writing to helper stdin for request ${reqNum}`);
+      }
+      this.proc.stdin.write(buf, (err) => {
+        if (this.enableTracing) {
+          console.error(`done writing to helper stdin for request ${reqNum}`);
+        }
+        if (err) {
+          reject(err);
+          return;
+        }
+        resolve();
+      });
+    });
+    const resp = await prom.promise;
+    if (this.enableTracing) {
+      console.error(
+        `request to ${reqNum} got result, reqMap keys ${[
+          ...this.reqMap.keys(),
+        ]}`,
+      );
+    }
+    if (this.reqMap.size === 0) {
+      this.isListening = false;
+      this.proc.stdout.removeAllListeners();
+      this.unrefProc();
+    }
+    return resp;
+  }
+}
+
+enum TypeTag {
+  NULL = 1,
+  INT = 2,
+  REAL = 3,
+  TEXT = 4,
+  BLOB = 5,
+}
+
+function encodeParams(wr: Writer, params: BindParams | undefined): void {
+  const keys = Object.keys(params ?? {});
+  wr.writeUint16(keys.length);
+  for (const key of keys) {
+    wr.writeString(key);
+    const val = params![key];
+    if (typeof val === "number" || typeof val === "bigint") {
+      wr.writeUint8(TypeTag.INT);
+      wr.writeInt64(BigInt(val));
+    } else if (val == null) {
+      wr.writeUint8(TypeTag.NULL);
+    } else if (typeof val === "string") {
+      wr.writeUint8(TypeTag.TEXT);
+      wr.writeString(val);
+    } else if (ArrayBuffer.isView(val)) {
+      wr.writeUint8(TypeTag.BLOB);
+      wr.writeUint32(val.length);
+      wr.writeRawBytes(val);
+    } else {
+      throw Error("unsupported type for bind params");
+    }
+  }
+}
+
+function decodeRowList(rd: Reader): ResultRow[] {
+  const rows: ResultRow[] = [];
+  const numRows = rd.readUint16();
+  const numCols = rd.readUint16();
+  const colNames: string[] = [];
+  for (let i = 0; i < numCols; i++) {
+    colNames.push(rd.readString());
+  }
+  for (let i = 0; i < numRows; i++) {
+    const row: ResultRow = {};
+    for (let j = 0; j < numCols; j++) {
+      const valTag = rd.readUint8();
+      if (valTag === TypeTag.NULL) {
+        row[colNames[j]] = null;
+      } else if (valTag == TypeTag.TEXT) {
+        row[colNames[j]] = rd.readString();
+      } else if (valTag == TypeTag.BLOB) {
+        row[colNames[j]] = rd.readBytes();
+      } else if (valTag == TypeTag.INT) {
+        let val: number | bigint = rd.readInt64();
+        if (val <= Number.MAX_SAFE_INTEGER && val >= Number.MIN_SAFE_INTEGER) {
+          val = Number(val);
+        }
+        row[colNames[j]] = val;
+      }
+    }
+    rows.push(row);
+  }
+  return rows;
+}
+
+class Reader {
+  public pos = 0;
+  private dv: DataView;
+  private td = new TextDecoder();
+  constructor(private buf: Uint8Array) {
+    this.dv = new DataView(buf.buffer);
+  }
+  readUint16(): number {
+    const res = this.dv.getUint16(this.pos);
+    this.pos += 2;
+    return res;
+  }
+  readInt64(): bigint {
+    const res = this.dv.getBigInt64(this.pos);
+    this.pos += 8;
+    return res;
+  }
+  readUint8(): number {
+    const res = this.dv.getUint8(this.pos);
+    this.pos += 1;
+    return res;
+  }
+  readString(): string {
+    const len = this.dv.getUint32(this.pos);
+    const strBuf = this.buf.slice(this.pos + 4, this.pos + 4 + len);
+    this.pos += 4 + len;
+    return this.td.decode(strBuf);
+  }
+  readBytes(): Uint8Array {
+    const len = this.dv.getUint32(this.pos);
+    const rBuf = this.buf.slice(this.pos + 4, this.pos + 4 + len);
+    this.pos += 4 + len;
+    return rBuf;
+  }
+}
+
+class Writer {
+  private chunks: Uint8Array[] = [];
+
+  private te = new TextEncoder();
+
+  /**
+   * Write raw bytes without any length-prefix.
+   */
+  writeRawBytes(b: Uint8Array): void {
+    this.chunks.push(b);
+  }
+
+  /**
+   * Write length-prefixed string.
+   */
+  writeString(s: string) {
+    const bufStr = this.te.encode(s);
+    this.writeUint32(bufStr.length);
+    this.chunks.push(bufStr);
+  }
+
+  writeUint8(n: number): void {
+    const buf = new Uint8Array(1);
+    const dv = new DataView(buf.buffer);
+    dv.setUint8(0, n);
+    this.chunks.push(buf);
+  }
+
+  writeUint16(n: number): void {
+    const buf = new Uint8Array(2);
+    const dv = new DataView(buf.buffer);
+    dv.setUint16(0, n);
+    this.chunks.push(buf);
+  }
+
+  writeUint32(n: number): void {
+    const buf = new Uint8Array(4);
+    const dv = new DataView(buf.buffer);
+    dv.setUint32(0, n);
+    this.chunks.push(buf);
+  }
+
+  writeInt64(n: bigint): void {
+    const buf = new Uint8Array(8);
+    const dv = new DataView(buf.buffer);
+    dv.setBigInt64(0, n);
+    this.chunks.push(buf);
+  }
+
+  reap(): Uint8Array {
+    return concatArr(this.chunks);
+  }
+}
+
+class Sqlite3Error extends Error {
+  // Name of "code" is to be compatible with better-sqlite3.
+  constructor(
+    message: string,
+    public code: string,
+  ) {
+    super(message);
+  }
+}
+
+function throwForFailure(rd: Reader): never {
+  const msg = rd.readString();
+  // Numeric error code
+  rd.readUint16();
+  const errName = rd.readString();
+  throw new Sqlite3Error(msg, errName);
+}
+
+function expectCommunicateSuccess(commRes: Uint8Array): void {
+  const rd = new Reader(commRes);
+  const respType = rd.readUint8();
+  if (respType == HelperResp.OK) {
+    // Good
+  } else if (respType == HelperResp.FAIL) {
+    throwForFailure(rd);
+  } else {
+    throw Error("unexpected response tag");
+  }
+}
+
+export async function createNodeHelperSqlite3Impl(
+  opts: { enableTracing?: boolean } = {},
+): Promise<Sqlite3Interface> {
+  const enableTracing = opts.enableTracing ?? false;
+  const helper = new Helper({ enableTracing });
+  const resp = await helper.communicate(HelperCmd.HELLO, new Uint8Array());
+
+  let counterDb = 1;
+  let counterPrep = 1;
+
+  return {
+    async open(filename: string): Promise<Sqlite3Database> {
+      if (enableTracing) {
+        console.error(`opening database ${filename}`);
+      }
+      const myDbId = counterDb++;
+      {
+        const wr = new Writer();
+        wr.writeUint16(myDbId);
+        wr.writeString(filename);
+        const payload = wr.reap();
+        const commRes = await helper.communicate(HelperCmd.OPEN, payload);
+        expectCommunicateSuccess(commRes);
+      }
+      if (enableTracing) {
+        console.error(`opened database ${filename}`);
+      }
+      return {
+        internalDbHandle: undefined,
+        async close() {
+          if (enableTracing) {
+            console.error(`closing database`);
+          }
+          const wr = new Writer();
+          wr.writeUint16(myDbId);
+          const payload = wr.reap();
+          const commRes = await helper.communicate(HelperCmd.CLOSE, payload);
+          expectCommunicateSuccess(commRes);
+        },
+        async prepare(stmtStr): Promise<Sqlite3Statement> {
+          const myPrepId = counterPrep++;
+          if (enableTracing) {
+            console.error(`preparing statement ${myPrepId}`);
+          }
+          {
+            const wr = new Writer();
+            wr.writeUint16(myDbId);
+            wr.writeUint16(myPrepId);
+            wr.writeString(stmtStr);
+            const payload = wr.reap();
+            const commRes = await helper.communicate(
+              HelperCmd.PREPARE,
+              payload,
+            );
+            expectCommunicateSuccess(commRes);
+          }
+          if (enableTracing) {
+            console.error(`prepared statement ${myPrepId}`);
+          }
+          return {
+            internalStatement: undefined,
+            async getAll(params?: BindParams): Promise<ResultRow[]> {
+              if (enableTracing) {
+                console.error(`running getAll`);
+              }
+              const wr = new Writer();
+              wr.writeUint16(myPrepId);
+              encodeParams(wr, params);
+              const payload = wr.reap();
+              const commRes = await helper.communicate(
+                HelperCmd.STMT_GET_ALL,
+                payload,
+              );
+              const rd = new Reader(commRes);
+              const respType = rd.readUint8();
+              if (respType === HelperResp.ROWLIST) {
+                const rows = decodeRowList(rd);
+                return rows;
+              } else if (respType === HelperResp.FAIL) {
+                throwForFailure(rd);
+              } else {
+                throw Error("unexpected result for getAll");
+              }
+            },
+            async getFirst(
+              params?: BindParams,
+            ): Promise<ResultRow | undefined> {
+              if (enableTracing) {
+                console.error(`running getFirst`);
+              }
+              const wr = new Writer();
+              wr.writeUint16(myPrepId);
+              encodeParams(wr, params);
+              const payload = wr.reap();
+              const commRes = await helper.communicate(
+                HelperCmd.STMT_GET_FIRST,
+                payload,
+              );
+              const rd = new Reader(commRes);
+              const respType = rd.readUint8();
+              if (respType === HelperResp.ROWLIST) {
+                const rows = decodeRowList(rd);
+                return rows[0];
+              } else if (respType === HelperResp.FAIL) {
+                throwForFailure(rd);
+              } else {
+                throw Error("unexpected result for getAll");
+              }
+            },
+            async run(params?: BindParams): Promise<RunResult> {
+              if (enableTracing) {
+                console.error(`running run`);
+              }
+              const wr = new Writer();
+              wr.writeUint16(myPrepId);
+              encodeParams(wr, params);
+              const payload = wr.reap();
+              const commRes = await helper.communicate(
+                HelperCmd.STMT_RUN,
+                payload,
+              );
+              if (enableTracing) {
+                console.error(`run got response`);
+              }
+              const rd = new Reader(commRes);
+              const respType = rd.readUint8();
+              if (respType === HelperResp.OK) {
+                if (enableTracing) {
+                  console.error(`run success (OK)`);
+                }
+                return {
+                  lastInsertRowid: 0,
+                };
+              } else if (respType === HelperResp.RUNRESULT) {
+                if (enableTracing) {
+                  console.error(`run success (RUNRESULT)`);
+                }
+                const lastInsertRowid = rd.readInt64();
+                return {
+                  lastInsertRowid,
+                };
+              } else if (respType === HelperResp.FAIL) {
+                if (enableTracing) {
+                  console.error(`run error (FAIL)`);
+                }
+                throwForFailure(rd);
+              } else {
+                throw Error("SQL run failed");
+              }
+            },
+          };
+        },
+        async exec(sqlStr: string): Promise<void> {
+          {
+            if (enableTracing) {
+              console.error(`running execute`);
+            }
+            const wr = new Writer();
+            wr.writeUint16(myDbId);
+            wr.writeString(sqlStr);
+            const payload = wr.reap();
+            const execRes = await helper.communicate(HelperCmd.EXEC, payload);
+            expectCommunicateSuccess(execRes);
+          }
+        },
+      };
+    },
+  };
+}
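
To recap the framing that Helper.communicate above and the Python helper below agree on (an illustrative restatement, not additional commit content): a request is a big-endian frame of [length u32][request id u32][command u8][payload], where the length field counts the entire frame including itself; responses come back as [length u32][request id u32][response tag u8][payload]:

// Illustrative request-frame encoder matching communicate() above.
function encodeRequestFrame(
  reqId: number,
  cmd: number, // a HelperCmd value, e.g. HelperCmd.HELLO = 1
  payload: Uint8Array,
): Uint8Array {
  const len = 4 + 4 + 1 + payload.length; // length field counts itself
  const buf = new Uint8Array(len);
  const dv = new DataView(buf.buffer);
  dv.setUint32(0, len); // total frame length, big-endian
  dv.setUint32(4, reqId); // echoed back by the helper in its response
  dv.setUint8(8, cmd); // command tag
  buf.set(payload, 9); // command-specific payload
  return buf;
}
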
diff --git a/packages/idb-bridge/src/sqlite3-interface.ts b/packages/idb-bridge/src/sqlite3-interface.ts
index 8668ef844..434021420 100644
--- a/packages/idb-bridge/src/sqlite3-interface.ts
+++ b/packages/idb-bridge/src/sqlite3-interface.ts
@@ -1,15 +1,15 @@
 export type Sqlite3Database = {
   internalDbHandle: any;
-  exec(sqlStr: string): void;
-  prepare(stmtStr: string): Sqlite3Statement;
-  close(): void;
+  exec(sqlStr: string): Promise<void>;
+  prepare(stmtStr: string): Promise<Sqlite3Statement>;
+  close(): Promise<void>;
 };
 export type Sqlite3Statement = {
   internalStatement: any;
 
-  run(params?: BindParams): RunResult;
-  getAll(params?: BindParams): ResultRow[];
-  getFirst(params?: BindParams): ResultRow | undefined;
+  run(params?: BindParams): Promise<RunResult>;
+  getAll(params?: BindParams): Promise<ResultRow[]>;
+  getFirst(params?: BindParams): Promise<ResultRow | undefined>;
 };
 
 export interface RunResult {
@@ -30,5 +30,5 @@ export type ResultRow = Record<string, Sqlite3Value>;
  * to be used by our IndexedDB sqlite3 backend.
  */
 export interface Sqlite3Interface {
-  open(filename: string): Sqlite3Database;
+  open(filename: string): Promise<Sqlite3Database>;
 }
diff --git a/packages/idb-bridge/src/testingdb.ts b/packages/idb-bridge/src/testingdb.ts
index 6c13979ca..9f80cae74 100644
--- a/packages/idb-bridge/src/testingdb.ts
+++ b/packages/idb-bridge/src/testingdb.ts
@@ -16,7 +16,7 @@
 import { createSqliteBackend } from "./SqliteBackend.js";
 import { BridgeIDBFactory } from "./bridge-idb.js";
 import { IDBFactory } from "./idbtypes.js";
-import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+import { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
 
 let idbFactory: IDBFactory | undefined = undefined;
 
@@ -24,7 +24,11 @@ export async function initTestIndexedDB(): Promise<void> {
   // const backend = new MemoryBackend();
   // backend.enableTracing = true;
 
-  const sqlite3Impl = await createNodeSqlite3Impl();
+  const sqlite3Impl = await createNodeHelperSqlite3Impl({
+    enableTracing: false,
+  });
+
+  // const sqlite3Impl = await createNodeBetterSqlite3Impl();
 
   const backend = await createSqliteBackend(sqlite3Impl, {
     filename: ":memory:",
diff --git a/packages/idb-bridge/taler-helper-sqlite3 b/packages/idb-bridge/taler-helper-sqlite3
new file mode 100755
index 000000000..02a936220
--- /dev/null
+++ b/packages/idb-bridge/taler-helper-sqlite3
@@ -0,0 +1,305 @@
+#!/usr/bin/env python
+
+import sqlite3
+import sys
+import os
+
+print("started sqlite3 helper at", os.getcwd(), file=sys.stderr)
+
+enable_tracing = False
+def trace(*args):
+    print("HELPER", *args, file=sys.stderr)
+
+CMD_HELLO = 1
+CMD_SHUTDOWN = 2
+CMD_OPEN = 3
+CMD_CLOSE = 4
+CMD_PREPARE = 5
+CMD_STMT_GET_ALL = 6
+CMD_STMT_GET_FIRST = 7
+CMD_STMT_RUN = 8
+CMD_EXEC = 9
+
+RESP_OK = 1
+RESP_FAIL = 2
+RESP_ROWLIST = 3
+RESP_RUNRESULT = 4
+RESP_STMT = 5
+
+
+TAG_NULL = 1
+TAG_INT = 2
+TAG_REAL = 3
+TAG_TEXT = 4
+TAG_BLOB = 5
+
+cmdstream = open(0, "rb")
+respstream = open(1, "wb")
+
+db_handles = dict()
+
+# Since python's sqlite3 library does not support prepared statements,
+# we fake it by just storing the string of the statement.
+# Internally, the sqlite3 library does its own caching of
+# prepared statements.
+prep_handles = dict()
+
+
+def write_resp(req_id, cmd, payload=None):
+    if enable_tracing:
+        trace("sending response to request", req_id)
+    outlen = 4 + 4 + 1 + (0 if payload is None else len(payload))
+    respstream.write(outlen.to_bytes(4))
+    respstream.write(req_id.to_bytes(4))
+    respstream.write(cmd.to_bytes(1))
+    if payload is not None:
+        respstream.write(payload)
+    respstream.flush()
+
+
+dbconn = None
+
+
+class PacketWriter:
+    def __init__(self):
+        self.chunks = []
+
+    def write_string(self, s):
+        buf = s.encode("utf-8")
+        self.write_uint32(len(buf))
+        self.write_raw_bytes(buf)
+
+    def write_bytes(self, buf):
+        self.write_uint32(len(buf))
+        self.write_raw_bytes(buf)
+
+    def write_raw_bytes(self, buf):
+        self.chunks.append(buf)
+
+    def write_uint8(self, n):
+        self.chunks.append(n.to_bytes(1))
+
+    def write_uint32(self, n):
+        self.chunks.append(n.to_bytes(4))
+
+    def write_uint16(self, n):
+        self.chunks.append(n.to_bytes(2))
+
+    def write_int64(self, n):
+        self.chunks.append(n.to_bytes(8, signed=True))
+
+    def write_rowlist(self, rows, description):
+        self.write_uint16(len(rows))
+        self.write_uint16(len(description))
+        for desc in description:
+            col_name = desc[0]
+            self.write_string(col_name)
+
+        if len(description) == 0 or len(rows) == 0:
+            return
+
+        for row in rows:
+            if len(row) != len(description):
+                raise Error("invariant violated")
+            for val in row:
+                if val is None:
+                    self.write_uint8(TAG_NULL)
+                elif isinstance(val, str):
+                    self.write_uint8(TAG_TEXT)
+                    self.write_string(val)
+                elif isinstance(val, bytes):
+                    self.write_uint8(TAG_BLOB)
+                    self.write_bytes(val)
+                elif isinstance(val, int):
+                    self.write_uint8(TAG_INT)
+                    self.write_int64(val)
+                else:
+                    raise Error("unknown col type")
+
+    def reap(self):
+        return b"".join(self.chunks)
+
+
+class PacketReader:
+    def __init__(self, data):
+        self.data = data
+        self.pos = 0
+
+    def read_string(self):
+        l = self.read_uint32()
+        d = self.data[self.pos : self.pos + l]
+        self.pos += l
+        return d.decode("utf-8")
+
+    def read_blob(self):
+        l = self.read_uint32()
+        d = self.data[self.pos : self.pos + l]
+        self.pos += l
+        return d
+
+    def read_uint16(self):
+        d = int.from_bytes(self.data[self.pos : self.pos + 2])
+        self.pos += 2
+        return d
+
+    def read_uint32(self):
+        d = int.from_bytes(self.data[self.pos : self.pos + 4])
+        self.pos += 4
+        return d
+
+    def read_int64(self):
+        d = int.from_bytes(self.data[self.pos : self.pos + 8], signed=True)
+        self.pos += 8
+        return d
+
+    def read_uint8(self):
+        d = self.data[self.pos]
+        self.pos += 1
+        return d
+
+    def read_params(self):
+        num_args = pr.read_uint16()
+        params = dict()
+        for x in range(num_args):
+            name = pr.read_string()
+            tag = pr.read_uint8()
+            if tag == TAG_NULL:
+                params[name] = None
+                continue
+            if tag == TAG_INT:
+                params[name] = pr.read_int64()
+                continue
+            if tag == TAG_TEXT:
+                params[name] = pr.read_string()
+                continue
+            if tag == TAG_BLOB:
+                params[name] = pr.read_blob()
+                continue
+            raise Error("tag not understood")
+        return params
+
+def read_exactly(n):
+    buf = cmdstream.read(n)
+    if len(buf) != n:
+        raise Error("incomplete message") 
+    return buf
+
+def handle_query_failure(req_id, e):
+    pw = PacketWriter()
+    pw.write_string(str(e))
+    pw.write_uint16(e.sqlite_errorcode)
+    pw.write_string(e.sqlite_errorname)
+    write_resp(req_id, RESP_FAIL, pw.reap())
+
+while True:
+    if enable_tracing:
+        trace("reading command")
+    buf_sz = cmdstream.read(4)
+    if len(buf_sz) == 0:
+        trace("end of input reached")
+        sys.exit(0)
+    elif len(buf_sz) != 4:
+        raise Error("incomplete message")
+    size = int.from_bytes(buf_sz)
+    req_id = int.from_bytes(read_exactly(4))
+    rest = read_exactly(size - 8)
+    pr = PacketReader(rest)
+    cmd = pr.read_uint8()
+    if enable_tracing:
+        trace("received command:", cmd, "request_id:", req_id)
+
+    if cmd == CMD_HELLO:
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_OPEN:
+        # open
+        if dbconn is not None:
+            raise Error("DB already connected")
+        db_handle = pr.read_uint16()
+        filename = pr.read_string()
+        dbconn = sqlite3.connect(filename, autocommit=True, isolation_level=None)
+        # Make sure we are not in a transaction
+        dbconn.commit()
+        db_handles[db_handle] = dbconn
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_CLOSE:
+        # close
+        dbconn.close()
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_PREPARE:
+        db_id = pr.read_uint16()
+        prep_id = pr.read_uint16()
+        sql = pr.read_string()
+        prep_handles[prep_id] = (dbconn, sql)
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_STMT_GET_ALL:
+        prep_id = pr.read_uint16()
+        params = pr.read_params()
+        dbconn, stmt = prep_handles[prep_id]
+        cursor = dbconn.cursor()
+        try:
+            res = cursor.execute(stmt, params)
+            rows = cursor.fetchall()
+        except sqlite3.Error as e:
+            handle_query_failure(req_id, e)
+            continue
+        pw = PacketWriter()
+        pw.write_rowlist(rows, cursor.description)
+        write_resp(req_id, RESP_ROWLIST, pw.reap())
+        continue
+    if cmd == CMD_STMT_GET_FIRST:
+        prep_id = pr.read_uint16()
+        params = pr.read_params()
+        dbconn, stmt = prep_handles[prep_id]
+        cursor = dbconn.cursor()
+        try:
+            res = cursor.execute(stmt, params)
+            row = cursor.fetchone()
+        except sqlite3.Error as e:
+            handle_query_failure(req_id, e)
+            continue
+        pw = PacketWriter()
+        rows = [row] if row is not None else []
+        pw.write_rowlist(rows, cursor.description)
+        write_resp(req_id, RESP_ROWLIST, pw.reap())
+        continue
+    if cmd == CMD_STMT_RUN:
+        if enable_tracing:
+            trace("running statement")
+        prep_id = pr.read_uint16()
+        params = pr.read_params()
+        dbconn, stmt = prep_handles[prep_id]
+        cursor = dbconn.cursor()
+        try:
+            res = cursor.execute(stmt, params)
+        except sqlite3.Error as e:
+            if enable_tracing:
+                trace("got sqlite error")
+            handle_query_failure(req_id, e)
+            continue
+        if enable_tracing:
+            trace("running query succeeded")
+        if cursor.lastrowid is None:
+            write_resp(req_id, RESP_OK)
+        else:
+            pw = PacketWriter()
+            pw.write_int64(cursor.lastrowid)
+            payload = pw.reap()
+            write_resp(req_id, RESP_RUNRESULT, payload)
+        continue
+    if cmd == CMD_EXEC:
+        db_id = pr.read_uint16()
+        sql = pr.read_string()
+        dbconn = db_handles[db_id]
+        try:
+            dbconn.executescript(sql)
+        except sqlite3.Error as e:
+            handle_query_failure(req_id, e)
+            continue
+        write_resp(req_id, RESP_OK)
+        continue
+
+    print("unknown command", file=sys.stderr)
diff --git a/packages/taler-wallet-cli/Makefile b/packages/taler-wallet-cli/Makefile
index c8529c768..286988ab1 100644
--- a/packages/taler-wallet-cli/Makefile
+++ b/packages/taler-wallet-cli/Makefile
@@ -35,8 +35,7 @@ install-nodeps:
        install ./dist/taler-wallet-cli-bundled.cjs $(DESTDIR)$(NODEDIR)/dist/
       install ./dist/taler-wallet-cli-bundled.cjs.map $(DESTDIR)$(NODEDIR)/dist/
        install ./bin/taler-wallet-cli.mjs $(DESTDIR)$(NODEDIR)/bin/
-       install ../idb-bridge/node_modules/better-sqlite3/build/Release/better_sqlite3.node $(DESTDIR)$(LIBDIR)/build/ \
-               || echo "sqlite3 unavailable, better-sqlite3 native module not found"
+       install ../idb-bridge/taler-helper-sqlite3 $(DESTDIR)$(BINDIR)/taler-helper-sqlite3
       ln -sf ../lib/taler-wallet-cli/node_modules/taler-wallet-cli/bin/taler-wallet-cli.mjs $(DESTDIR)$(BINDIR)/taler-wallet-cli
 deps:
        pnpm install --frozen-lockfile --filter @gnu-taler/taler-wallet-cli...
diff --git a/packages/taler-wallet-core/src/host-impl.node.ts b/packages/taler-wallet-core/src/host-impl.node.ts
index 9eddd151e..4fc54a39c 100644
--- a/packages/taler-wallet-core/src/host-impl.node.ts
+++ b/packages/taler-wallet-core/src/host-impl.node.ts
@@ -26,10 +26,10 @@
 import {
   BridgeIDBFactory,
   MemoryBackend,
+  createNodeHelperSqlite3Impl,
   createSqliteBackend,
   shimIndexedDB,
 } from "@gnu-taler/idb-bridge";
-import { createNodeSqlite3Impl } from "@gnu-taler/idb-bridge/node-sqlite3-bindings";
 import {
   Logger,
   SetTimeoutTimerAPI,
@@ -113,7 +113,7 @@ async function makeSqliteDb(
   } else {
     BridgeIDBFactory.enableTracing = false;
   }
-  const imp = await createNodeSqlite3Impl();
+  const imp = await createNodeHelperSqlite3Impl();
   const dbFilename = getSqlite3FilenameFromStoragePath(
     args.persistentStoragePath,
   );
diff --git a/packages/taler-wallet-core/src/host-impl.qtart.ts b/packages/taler-wallet-core/src/host-impl.qtart.ts
index 40210f0eb..5cf7af61a 100644
--- a/packages/taler-wallet-core/src/host-impl.qtart.ts
+++ b/packages/taler-wallet-core/src/host-impl.qtart.ts
@@ -66,32 +66,32 @@ export async function createQtartSqlite3Impl(): Promise<Sqlite3Interface> {
     throw Error("globalThis._qtart not defined");
   }
   return {
-    open(filename: string) {
+    async open(filename: string) {
       const internalDbHandle = tart.sqlite3Open(filename);
       return {
         internalDbHandle,
-        close() {
+        async close() {
           tart.sqlite3Close(internalDbHandle);
         },
-        prepare(stmtStr): Sqlite3Statement {
+        async prepare(stmtStr): Promise<Sqlite3Statement> {
           const stmtHandle = tart.sqlite3Prepare(internalDbHandle, stmtStr);
           return {
             internalStatement: stmtHandle,
-            getAll(params): ResultRow[] {
+            async getAll(params): Promise<ResultRow[]> {
               numStmt++;
               return tart.sqlite3StmtGetAll(stmtHandle, params);
             },
-            getFirst(params): ResultRow | undefined {
+            async getFirst(params): Promise<ResultRow | undefined> {
               numStmt++;
               return tart.sqlite3StmtGetFirst(stmtHandle, params);
             },
-            run(params) {
+            async run(params) {
               numStmt++;
               return tart.sqlite3StmtRun(stmtHandle, params);
             },
           };
         },
-        exec(sqlStr): void {
+        async exec(sqlStr): Promise<void> {
           numStmt++;
           tart.sqlite3Exec(internalDbHandle, sqlStr);
         },
