From: gnunet
Subject: [taler-wallet-core] branch master updated (6abeeced4 -> 6bb967f15)
Date: Fri, 20 Dec 2024 11:13:39 +0100

This is an automated email from the git hooks/post-receive script.

dold pushed a change to branch master
in repository wallet-core.

    from 6abeeced4 -bump debian
     new 7caeadc84 bump better-sqlite3 version
     new 8e11ead34 new sqlite3 backend, many DB bug fixes
     new 6bb967f15 get rid of better-sqlite3 dependency

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 Makefile                                           |   2 -
 packages/idb-bridge/check.js                       |  18 -
 packages/idb-bridge/package.json                   |   7 +-
 packages/idb-bridge/src/MemoryBackend.ts           |   2 +-
 packages/idb-bridge/src/SqliteBackend.test.ts      |   6 +-
 packages/idb-bridge/src/SqliteBackend.ts           | 846 ++++++++++++---------
 packages/idb-bridge/src/backend-interface.ts       |   2 +-
 packages/idb-bridge/src/bench.ts                   |   4 +-
 packages/idb-bridge/src/bridge-idb.ts              |  67 +-
 .../src/idb-wpt-ported/idbfactory-open.test.ts     |   9 +-
 packages/idb-bridge/src/index.ts                   |  24 +-
 .../src/node-helper-sqlite3-impl.test.ts           |  82 ++
 .../idb-bridge/src/node-helper-sqlite3-impl.ts     | 572 ++++++++++++++
 packages/idb-bridge/src/node-sqlite3-impl.ts       |  84 --
 packages/idb-bridge/src/sqlite3-interface.ts       |  14 +-
 packages/idb-bridge/src/testingdb.ts               |   8 +-
 packages/idb-bridge/taler-helper-sqlite3           | 305 ++++++++
 packages/taler-wallet-cli/Makefile                 |   3 +-
 packages/taler-wallet-cli/build-qtart.mjs          |   2 +-
 packages/taler-wallet-core/src/host-impl.node.ts   |   6 +-
 packages/taler-wallet-core/src/host-impl.qtart.ts  |  14 +-
 packages/taler-wallet-embedded/build.mjs           |   2 +-
 packages/web-util/build.mjs                        |   4 +-
 pnpm-lock.yaml                                     | 138 +---
 24 files changed, 1576 insertions(+), 645 deletions(-)
 delete mode 100644 packages/idb-bridge/check.js
 create mode 100644 packages/idb-bridge/src/node-helper-sqlite3-impl.test.ts
 create mode 100644 packages/idb-bridge/src/node-helper-sqlite3-impl.ts
 delete mode 100644 packages/idb-bridge/src/node-sqlite3-impl.ts
 create mode 100755 packages/idb-bridge/taler-helper-sqlite3

diff --git a/Makefile b/Makefile
index 58677ac5b..bd6101293 100644
--- a/Makefile
+++ b/Makefile
@@ -276,7 +276,6 @@ install:
        $(MAKE) -C packages/merchant-backoffice-ui install-nodeps
        $(MAKE) -C packages/aml-backoffice-ui install-nodeps
        $(MAKE) -C packages/auditor-backoffice-ui install-nodeps
-       node packages/idb-bridge/check.js
 
 
 .PHONY: install-tools
@@ -287,7 +286,6 @@ install-tools:
        $(MAKE) -C packages/taler-wallet-cli install-nodeps
        $(MAKE) -C packages/anastasis-cli install-nodeps
        $(MAKE) -C packages/taler-harness install-nodeps
-       node packages/idb-bridge/check.js
 
 .PHONY: check-migration
 
diff --git a/packages/idb-bridge/check.js b/packages/idb-bridge/check.js
deleted file mode 100644
index 646676271..000000000
--- a/packages/idb-bridge/check.js
+++ /dev/null
@@ -1,18 +0,0 @@
-import bsq from "better-sqlite3";
-
-// Check if we can load the native module of better-sqlite3.
-// If not, give a nice error message.
-
-try {
-  const db = bsq(":memory:");
-} catch (e) {
-  console.log(e.message);
-  console.warn()
-  console.warn("WARNING: Unable to use better-sqlite3.");
-  console.warn("Please run \n\n  pnpm rebuild --recursive better-sqlite3 
--loglevel debug\n");
-  console.warn("to rebuild the native module.");
-  console.warn()
-  console.warn("Alternatively, check\n\n  
https://nodejs.org/en/about/previous-releases\n";);
-  console.warn("for a node version compatible with the native module.");
-  process.exit(1);
-}
diff --git a/packages/idb-bridge/package.json b/packages/idb-bridge/package.json
index f18b0941b..ab4f73138 100644
--- a/packages/idb-bridge/package.json
+++ b/packages/idb-bridge/package.json
@@ -20,8 +20,8 @@
     ".": {
       "default": "./lib/index.js"
     },
-    "./node-sqlite3-bindings": {
-      "default": "./lib/node-sqlite3-impl.js"
+    "./node-helper-sqlite3-impl": {
+      "default": "./lib/node-helper-sqlite3-impl.js"
     }
   },
   "devDependencies": {
@@ -36,8 +36,5 @@
   },
   "ava": {
     "failFast": true
-  },
-  "optionalDependencies": {
-    "better-sqlite3": "10.0.0"
   }
 }
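
For reference, a minimal sketch of what the renamed export entry means for
consumers: the "./node-sqlite3-bindings" subpath is gone and imports have to
use "./node-helper-sqlite3-impl" instead. The package specifier below is an
assumption and does not appear in this diff.

    // Hypothetical consumer import against the new export map entry.
    import { createNodeHelperSqlite3Impl } from "@gnu-taler/idb-bridge/node-helper-sqlite3-impl";

    const sqlite3Impl = await createNodeHelperSqlite3Impl();
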
diff --git a/packages/idb-bridge/src/MemoryBackend.ts b/packages/idb-bridge/src/MemoryBackend.ts
index 526920a9f..a4e3bdb7a 100644
--- a/packages/idb-bridge/src/MemoryBackend.ts
+++ b/packages/idb-bridge/src/MemoryBackend.ts
@@ -1448,7 +1448,7 @@ export class MemoryBackend implements Backend {
     }
   }
 
-  rollback(btx: DatabaseTransaction): void {
+  async rollback(btx: DatabaseTransaction): Promise<void> {
     if (this.enableTracing) {
       console.log(`TRACING: rollback`);
     }
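
A minimal sketch of what the now-asynchronous rollback signature means for
callers of the backend interface (the caller function is hypothetical, and it
is assumed that Backend and DatabaseTransaction are exported from
backend-interface.js as they are used elsewhere in this package):

    import { Backend, DatabaseTransaction } from "./backend-interface.js";

    // rollback() now returns a Promise<void>, so abort paths must await it.
    async function abortTransaction(backend: Backend, btx: DatabaseTransaction) {
      await backend.rollback(btx);
    }
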
diff --git a/packages/idb-bridge/src/SqliteBackend.test.ts b/packages/idb-bridge/src/SqliteBackend.test.ts
index 612cb9d4b..85f4b5596 100644
--- a/packages/idb-bridge/src/SqliteBackend.test.ts
+++ b/packages/idb-bridge/src/SqliteBackend.test.ts
@@ -15,11 +15,11 @@
  */
 
 import test from "ava";
+import * as fs from "node:fs";
 import { createSqliteBackend } from "./SqliteBackend.js";
 import { ResultLevel, StoreLevel } from "./backend-interface.js";
 import { BridgeIDBKeyRange } from "./bridge-idb.js";
-import * as fs from "node:fs";
-import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+import { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
 
 test("sqlite3 backend", async (t) => {
   const filename = "mytestdb.sqlite3";
@@ -29,7 +29,7 @@ test("sqlite3 backend", async (t) => {
     // Do nothing.
   }
   try {
-    const sqlite3Impl = await createNodeSqlite3Impl();
+    const sqlite3Impl = await createNodeHelperSqlite3Impl();
     const backend = await createSqliteBackend(sqlite3Impl, {
       filename,
     });
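
Taken together, the test changes above show the intended replacement path: the
backend is now constructed from the new helper-based implementation instead of
the removed node-sqlite3-impl. A minimal usage sketch, using the same relative
imports as the test (the helper-based implementation apparently replaces the
better-sqlite3 native module):

    import { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
    import { createSqliteBackend } from "./SqliteBackend.js";

    // Obtain the sqlite3 implementation (no native better-sqlite3 module
    // involved anymore) and build the IndexedDB bridge backend on top of it.
    const sqlite3Impl = await createNodeHelperSqlite3Impl();
    const backend = await createSqliteBackend(sqlite3Impl, {
      filename: "mytestdb.sqlite3",
    });
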
diff --git a/packages/idb-bridge/src/SqliteBackend.ts b/packages/idb-bridge/src/SqliteBackend.ts
index 8213de366..cecf0794a 100644
--- a/packages/idb-bridge/src/SqliteBackend.ts
+++ b/packages/idb-bridge/src/SqliteBackend.ts
@@ -77,28 +77,43 @@ interface ConnectionInfo {
   // Database that the connection has
   // connected to.
   databaseName: string;
+
+  storeMap: Map<string, MyStoreMeta>;
+  storeList: MyStoreMeta[];
 }
 
 interface TransactionInfo {
   connectionCookie: string;
 }
 
-interface ScopeIndexInfo {
-  indexId: SqliteRowid;
+interface MyIndexMeta {
+  indexId: SqliteRowid | undefined;
   keyPath: IDBKeyPath | IDBKeyPath[];
   multiEntry: boolean;
   unique: boolean;
+
+  currentName: string | undefined;
+  nameDirty: boolean;
 }
 
-interface ScopeInfo {
+interface MyStoreMeta {
   /**
    * Internal ID of the object store.
    * Used for fast retrieval, since it's the
    * primary key / rowid of the sqlite table.
    */
-  objectStoreId: SqliteRowid;
+  objectStoreId: SqliteRowid | undefined;
+
+  keyPath: string | string[] | null;
+
+  autoIncrement: boolean;
 
-  indexMap: Map<string, ScopeIndexInfo>;
+  indexList: MyIndexMeta[];
+  indexMap: Map<string, MyIndexMeta>;
+
+  currentName: string | undefined;
+
+  nameDirty: boolean;
 }
 
 interface IndexIterPos {
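
The MyStoreMeta/MyIndexMeta records introduced above hold per-connection
metadata in memory; stores and indexes created or renamed during a
versionchange transaction are only materialized in SQLite lazily (via
_provideObjectStore/_provideIndex further down) or when the transaction
commits. A minimal sketch of that lifecycle, with a hypothetical store name
and key path:

    // createObjectStore() only records metadata; no row exists in SQLite yet.
    const storeMeta: MyStoreMeta = {
      objectStoreId: undefined,   // assigned once _provideObjectStore runs
      keyPath: "id",              // hypothetical key path
      autoIncrement: false,
      indexList: [],
      indexMap: new Map(),
      currentName: "mystore",     // hypothetical name
      nameDirty: false,
    };
    // Renames only flip the in-memory fields; the sqlRenameObjectStore
    // statement runs at commit time when nameDirty is set.
    storeMeta.currentName = "mystore-renamed";
    storeMeta.nameDirty = true;
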
@@ -204,7 +219,7 @@ export function expectDbNumber(
   assertDbInvariant(typeof resultRow === "object" && resultRow != null);
   const res = (resultRow as any)[name];
   if (typeof res !== "number") {
-    throw Error("unexpected type from database");
+    throw Error("unexpected type from database (expected number)");
   }
   return res;
 }
@@ -213,7 +228,7 @@ export function expectDbString(resultRow: unknown, name: string): string {
   assertDbInvariant(typeof resultRow === "object" && resultRow != null);
   const res = (resultRow as any)[name];
   if (typeof res !== "string") {
-    throw Error("unexpected type from database");
+    throw Error("unexpected type from database (expected string)");
   }
   return res;
 }
@@ -228,7 +243,7 @@ export function expectDbStringOrNull(
     return null;
   }
   if (typeof res !== "string") {
-    throw Error("unexpected type from database");
+    throw Error("unexpected type from database (expected string or null)");
   }
   return res;
 }
@@ -255,16 +270,8 @@ export class SqliteBackend implements Backend {
    */
   private transactionDoneCond: AsyncCondition = new AsyncCondition();
 
-  /**
-   * Is the connection blocked because either an open request
-   * or delete request is being processed?
-   */
-  private connectionBlocked: boolean = false;
-
   private txLevel: TransactionLevel = TransactionLevel.None;
 
-  private txScope: Map<string, ScopeInfo> = new Map();
-
   private connectionMap: Map<string, ConnectionInfo> = new Map();
 
   private transactionMap: Map<string, TransactionInfo> = new Map();
@@ -278,12 +285,12 @@ export class SqliteBackend implements Backend {
     public db: Sqlite3Database,
   ) {}
 
-  private _prep(sql: string): Sqlite3Statement {
+  private async _prep(sql: string): Promise<Sqlite3Statement> {
     const stmt = this.sqlPrepCache.get(sql);
     if (stmt) {
       return stmt;
     }
-    const newStmt = this.db.prepare(sql);
+    const newStmt = await this.db.prepare(sql);
     this.sqlPrepCache.set(sql, newStmt);
     return newStmt;
   }
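
With _prep() and the statement methods now asynchronous, call sites throughout
the rest of this diff follow the same pattern: await the (cached) prepared
statement, then await its run/getFirst/getAll call. A condensed sketch, as it
would appear inside a SqliteBackend method:

    // Two-step form:
    const stmt = await this._prep(sqlGetDatabaseVersion);
    const row = await stmt.getFirst({ name: databaseName });

    // Single-expression form used in many hunks below:
    // await (await this._prep(sqlCommit)).run();
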
@@ -303,7 +310,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Read) {
       throw Error("only allowed in read transaction");
     }
-    const scopeInfo = this.txScope.get(req.objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(req.objectStoreName);
     if (!scopeInfo) {
       throw Error("object store not in scope");
     }
@@ -319,6 +326,8 @@ export class SqliteBackend implements Backend {
       }
     }
 
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+
     if (this.enableTracing) {
       console.log(
        `querying index os=${req.objectStoreName}, idx=${req.indexName}, direction=${req.direction}`,
@@ -331,7 +340,7 @@ export class SqliteBackend implements Backend {
     const queryUnique =
       req.direction === "nextunique" || req.direction === "prevunique";
 
-    const indexId = indexInfo.indexId;
+    const indexId = await this._provideIndex(connInfo, scopeInfo, indexInfo);
     const indexUnique = indexInfo.unique;
 
     let numResults = 0;
@@ -345,7 +354,7 @@ export class SqliteBackend implements Backend {
 
     const backendThis = this;
 
-    function packResult() {
+    async function packResult() {
       if (req.resultLevel > ResultLevel.OnlyCount) {
         for (let i = 0; i < encPrimaryKeys.length; i++) {
           primaryKeys.push(deserializeKey(encPrimaryKeys[i]));
@@ -355,8 +364,8 @@ export class SqliteBackend implements Backend {
         }
         if (req.resultLevel === ResultLevel.Full) {
           for (let i = 0; i < encPrimaryKeys.length; i++) {
-            const val = backendThis._getObjectValue(
-              scopeInfo!.objectStoreId,
+            const val = await backendThis._getObjectValue(
+              objectStoreId,
               encPrimaryKeys[i],
             );
             if (!val) {
@@ -390,7 +399,7 @@ export class SqliteBackend implements Backend {
       };
     }
 
-    let currentPos = this._startIndex({
+    let currentPos = await this._startIndex({
       indexId,
       indexUnique,
       queryUnique,
@@ -412,7 +421,7 @@ export class SqliteBackend implements Backend {
       const advancePrimaryKey = req.advancePrimaryKey
         ? serializeKey(req.advancePrimaryKey)
         : undefined;
-      currentPos = this._continueIndex({
+      currentPos = await this._continueIndex({
         indexId,
         indexUnique,
         queryUnique,
@@ -437,7 +446,7 @@ export class SqliteBackend implements Backend {
       const lastObjectPosition = req.lastObjectStorePosition
         ? serializeKey(req.lastObjectStorePosition)
         : undefined;
-      currentPos = this._continueIndex({
+      currentPos = await this._continueIndex({
         indexId,
         indexUnique,
         queryUnique,
@@ -468,7 +477,7 @@ export class SqliteBackend implements Backend {
       if (targetKeyObj != null) {
         const targetKey = serializeKey(targetKeyObj);
        const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen;
-        currentPos = this._continueIndex({
+        currentPos = await this._continueIndex({
           indexId,
           indexUnique,
           queryUnique,
@@ -515,7 +524,7 @@ export class SqliteBackend implements Backend {
         encIndexKeys.push(currentPos.indexPos);
       }
 
-      currentPos = backendThis._continueIndex({
+      currentPos = await backendThis._continueIndex({
         indexId,
         indexUnique,
         forward,
@@ -534,7 +543,7 @@ export class SqliteBackend implements Backend {
   // in the direction specified by "forward".
   // Do nothing if the current position is already past the
   // target position.
-  _continueIndex(req: {
+  async _continueIndex(req: {
     indexId: SqliteRowid;
     indexUnique: boolean;
     queryUnique: boolean;
@@ -543,7 +552,7 @@ export class SqliteBackend implements Backend {
     currentPos: IndexIterPos | null | undefined;
     targetIndexKey: Uint8Array;
     targetObjectKey?: Uint8Array;
-  }): IndexIterPos | undefined {
+  }): Promise<IndexIterPos | undefined> {
     const currentPos = req.currentPos;
     const forward = req.forward;
     const dir = forward ? 1 : -1;
@@ -580,50 +589,52 @@ export class SqliteBackend implements Backend {
     if (req.indexUnique) {
       if (req.forward) {
         if (req.inclusive) {
-          stmt = this._prep(sqlUniqueIndexDataContinueForwardInclusive);
+          stmt = await this._prep(sqlUniqueIndexDataContinueForwardInclusive);
         } else {
-          stmt = this._prep(sqlUniqueIndexDataContinueForwardStrict);
+          stmt = await this._prep(sqlUniqueIndexDataContinueForwardStrict);
         }
       } else {
         if (req.inclusive) {
-          stmt = this._prep(sqlUniqueIndexDataContinueBackwardInclusive);
+          stmt = await this._prep(sqlUniqueIndexDataContinueBackwardInclusive);
         } else {
-          stmt = this._prep(sqlUniqueIndexDataContinueBackwardStrict);
+          stmt = await this._prep(sqlUniqueIndexDataContinueBackwardStrict);
         }
       }
     } else {
       if (req.forward) {
         if (req.queryUnique || req.targetObjectKey == null) {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueForwardInclusiveUnique);
+            stmt = await this._prep(sqlIndexDataContinueForwardInclusiveUnique);
           } else {
-            stmt = this._prep(sqlIndexDataContinueForwardStrictUnique);
+            stmt = await this._prep(sqlIndexDataContinueForwardStrictUnique);
           }
         } else {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueForwardInclusive);
+            stmt = await this._prep(sqlIndexDataContinueForwardInclusive);
           } else {
-            stmt = this._prep(sqlIndexDataContinueForwardStrict);
+            stmt = await this._prep(sqlIndexDataContinueForwardStrict);
           }
         }
       } else {
         if (req.queryUnique || req.targetObjectKey == null) {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueBackwardInclusiveUnique);
+            stmt = await this._prep(
+              sqlIndexDataContinueBackwardInclusiveUnique,
+            );
           } else {
-            stmt = this._prep(sqlIndexDataContinueBackwardStrictUnique);
+            stmt = await this._prep(sqlIndexDataContinueBackwardStrictUnique);
           }
         } else {
           if (req.inclusive) {
-            stmt = this._prep(sqlIndexDataContinueBackwardInclusive);
+            stmt = await this._prep(sqlIndexDataContinueBackwardInclusive);
           } else {
-            stmt = this._prep(sqlIndexDataContinueBackwardStrict);
+            stmt = await this._prep(sqlIndexDataContinueBackwardStrict);
           }
         }
       }
     }
 
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       index_id: req.indexId,
       index_key: req.targetIndexKey,
       object_key: req.targetObjectKey,
@@ -653,32 +664,32 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  _startIndex(req: {
+  async _startIndex(req: {
     indexId: SqliteRowid;
     indexUnique: boolean;
     queryUnique: boolean;
     forward: boolean;
-  }): IndexIterPos | undefined {
+  }): Promise<IndexIterPos | undefined> {
     let stmt: Sqlite3Statement;
     if (req.indexUnique) {
       if (req.forward) {
-        stmt = this._prep(sqlUniqueIndexDataStartForward);
+        stmt = await this._prep(sqlUniqueIndexDataStartForward);
       } else {
-        stmt = this._prep(sqlUniqueIndexDataStartBackward);
+        stmt = await this._prep(sqlUniqueIndexDataStartBackward);
       }
     } else {
       if (req.forward) {
-        stmt = this._prep(sqlIndexDataStartForward);
+        stmt = await this._prep(sqlIndexDataStartForward);
       } else {
         if (req.queryUnique) {
-          stmt = this._prep(sqlIndexDataStartBackwardUnique);
+          stmt = await this._prep(sqlIndexDataStartBackwardUnique);
         } else {
-          stmt = this._prep(sqlIndexDataStartBackward);
+          stmt = await this._prep(sqlIndexDataStartBackward);
         }
       }
     }
 
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       index_id: req.indexId,
     });
 
@@ -715,7 +726,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Read) {
       throw Error("only allowed in read transaction");
     }
-    const scopeInfo = this.txScope.get(req.objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(req.objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -727,12 +738,14 @@ export class SqliteBackend implements Backend {
     const forward: boolean =
       req.direction === "next" || req.direction === "nextunique";
 
-    let currentKey = this._startObjectKey(scopeInfo.objectStoreId, forward);
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+
+    let currentKey = await this._startObjectKey(objectStoreId, forward);
 
     if (req.advancePrimaryKey != null) {
       const targetKey = serializeKey(req.advancePrimaryKey);
-      currentKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currentKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         forward,
         inclusive: true,
         currentKey,
@@ -742,8 +755,8 @@ export class SqliteBackend implements Backend {
 
     if (req.lastObjectStorePosition != null) {
       const targetKey = serializeKey(req.lastObjectStorePosition);
-      currentKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currentKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         forward,
         inclusive: false,
         currentKey,
@@ -756,8 +769,8 @@ export class SqliteBackend implements Backend {
       if (targetKeyObj != null) {
         const targetKey = serializeKey(targetKeyObj);
        const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen;
-        currentKey = this._continueObjectKey({
-          objectStoreId: scopeInfo.objectStoreId,
+        currentKey = await this._continueObjectKey({
+          objectStoreId: objectStoreId,
           forward,
           inclusive,
           currentKey,
@@ -790,8 +803,8 @@ export class SqliteBackend implements Backend {
         encPrimaryKeys.push(currentKey);
       }
 
-      currentKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currentKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         forward,
         inclusive: false,
         currentKey: null,
@@ -805,8 +818,8 @@ export class SqliteBackend implements Backend {
       }
       if (req.resultLevel === ResultLevel.Full) {
         for (let i = 0; i < encPrimaryKeys.length; i++) {
-          const val = this._getObjectValue(
-            scopeInfo.objectStoreId,
+          const val = await this._getObjectValue(
+            objectStoreId,
             encPrimaryKeys[i],
           );
           if (!val) {
@@ -834,17 +847,17 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  _startObjectKey(
+  async _startObjectKey(
     objectStoreId: number | bigint,
     forward: boolean,
-  ): Uint8Array | null {
+  ): Promise<Uint8Array | null> {
     let stmt: Sqlite3Statement;
     if (forward) {
-      stmt = this._prep(sqlObjectDataStartForward);
+      stmt = await this._prep(sqlObjectDataStartForward);
     } else {
-      stmt = this._prep(sqlObjectDataStartBackward);
+      stmt = await this._prep(sqlObjectDataStartBackward);
     }
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       object_store_id: objectStoreId,
     });
     if (!res) {
@@ -862,13 +875,13 @@ export class SqliteBackend implements Backend {
 
   // Result *must* be past targetKey in the direction
   // specified by "forward".
-  _continueObjectKey(req: {
+  async _continueObjectKey(req: {
     objectStoreId: number | bigint;
     forward: boolean;
     currentKey: Uint8Array | null;
     targetKey: Uint8Array;
     inclusive: boolean;
-  }): Uint8Array | null {
+  }): Promise<Uint8Array | null> {
     const { forward, currentKey, targetKey } = req;
     const dir = forward ? 1 : -1;
     if (currentKey) {
@@ -885,19 +898,19 @@ export class SqliteBackend implements Backend {
 
     if (req.inclusive) {
       if (req.forward) {
-        stmt = this._prep(sqlObjectDataContinueForwardInclusive);
+        stmt = await this._prep(sqlObjectDataContinueForwardInclusive);
       } else {
-        stmt = this._prep(sqlObjectDataContinueBackwardInclusive);
+        stmt = await this._prep(sqlObjectDataContinueBackwardInclusive);
       }
     } else {
       if (req.forward) {
-        stmt = this._prep(sqlObjectDataContinueForward);
+        stmt = await this._prep(sqlObjectDataContinueForward);
       } else {
-        stmt = this._prep(sqlObjectDataContinueBackward);
+        stmt = await this._prep(sqlObjectDataContinueBackward);
       }
     }
 
-    const res = stmt.getFirst({
+    const res = await stmt.getFirst({
       object_store_id: req.objectStoreId,
       x: req.targetKey,
     });
@@ -916,12 +929,12 @@ export class SqliteBackend implements Backend {
     return rkey;
   }
 
-  _getObjectValue(
+  async _getObjectValue(
     objectStoreId: number | bigint,
     key: Uint8Array,
-  ): string | undefined {
-    const stmt = this._prep(sqlObjectDataValueFromKey);
-    const res = stmt.getFirst({
+  ): Promise<string | undefined> {
+    const stmt = await this._prep(sqlObjectDataValueFromKey);
+    const res = await stmt.getFirst({
       object_store_id: objectStoreId,
       key: key,
     });
@@ -943,30 +956,14 @@ export class SqliteBackend implements Backend {
     if (!connInfo) {
       throw Error("connection not found");
     }
-    const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
-      name: objectStoreName,
-      database_name: connInfo.databaseName,
-    });
-    if (!objRes) {
-      throw Error("object store not found");
-    }
-    const objectStoreId = expectDbNumber(objRes, "id");
-    const keyPath = deserializeKeyPath(
-      expectDbStringOrNull(objRes, "key_path"),
-    );
-    const autoInc = expectDbNumber(objRes, "auto_increment");
-    const indexSet: string[] = [];
-    const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
-      object_store_id: objectStoreId,
-    });
-    for (const idxInfo of indexRes) {
-      const indexName = expectDbString(idxInfo, "name");
-      indexSet.push(indexName);
+    const storeMeta = connInfo.storeMap.get(objectStoreName);
+    if (!storeMeta) {
+      return undefined;
     }
     return {
-      keyPath,
-      autoIncrement: autoInc != 0,
-      indexSet,
+      keyPath: storeMeta.keyPath,
+      autoIncrement: storeMeta.autoIncrement,
+      indexSet: [...storeMeta.indexMap.keys()],
     };
   }
 
@@ -980,40 +977,23 @@ export class SqliteBackend implements Backend {
     if (!connInfo) {
       throw Error("connection not found");
     }
-    const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
-      name: objectStoreName,
-      database_name: connInfo.databaseName,
-    });
-    if (!objRes) {
+    const storeMeta = connInfo.storeMap.get(objectStoreName);
+    if (!storeMeta) {
       throw Error("object store not found");
     }
-    const objectStoreId = expectDbNumber(objRes, "id");
-    const idxInfo = this._prep(sqlGetIndexByName).getFirst({
-      object_store_id: objectStoreId,
-      name: indexName,
-    });
-    if (!idxInfo) {
-      throw Error(
-        `index ${indexName} on object store ${objectStoreName} not found`,
-      );
-    }
-    const indexUnique = expectDbNumber(idxInfo, "unique_index");
-    const indexMultiEntry = expectDbNumber(idxInfo, "multientry");
-    const indexKeyPath = deserializeKeyPath(
-      expectDbString(idxInfo, "key_path"),
-    );
-    if (!indexKeyPath) {
-      throw Error("db inconsistent");
+    const indexMeta = storeMeta.indexMap.get(indexName);
+    if (!indexMeta) {
+      return undefined;
     }
     return {
-      keyPath: indexKeyPath,
-      multiEntry: indexMultiEntry != 0,
-      unique: indexUnique != 0,
+      keyPath: indexMeta.keyPath,
+      multiEntry: indexMeta.multiEntry,
+      unique: indexMeta.unique,
     };
   }
 
   async getDatabases(): Promise<BridgeIDBDatabaseInfo[]> {
-    const dbList = this._prep(sqlListDatabases).getAll();
+    const dbList = await (await this._prep(sqlListDatabases)).getAll();
     let res: BridgeIDBDatabaseInfo[] = [];
     for (const r of dbList) {
       res.push({
@@ -1025,9 +1005,10 @@ export class SqliteBackend implements Backend {
     return res;
   }
 
-  private _loadObjectStoreNames(databaseName: string): string[] {
+  private async _loadObjectStoreNames(databaseName: string): Promise<string[]> {
     const objectStoreNames: string[] = [];
-    const storesRes = this._prep(sqlGetObjectStoresByDatabase).getAll({
+    const stmt = await this._prep(sqlGetObjectStoresByDatabase);
+    const storesRes = await stmt.getAll({
       database_name: databaseName,
     });
     for (const res of storesRes) {
@@ -1040,38 +1021,77 @@ export class SqliteBackend implements Backend {
     return objectStoreNames;
   }
 
+  async _runSqlBegin(): Promise<void> {
+    const stmt = await this._prep(sqlBegin);
+    await stmt.run();
+  }
+
+  async _runSqlCommit(): Promise<void> {
+    const stmt = await this._prep(sqlCommit);
+    await stmt.run();
+  }
+
+  async _runSqlGetDatabaseVersion(
+    databaseName: string,
+  ): Promise<number | undefined> {
+    const versionRes = await (await this._prep(sqlGetDatabaseVersion)).getFirst({
+      name: databaseName,
+    });
+    if (versionRes == undefined) {
+      return undefined;
+    }
+    const verNum = expectDbNumber(versionRes, "version");
+    assertDbInvariant(typeof verNum === "number");
+    return verNum;
+  }
+
+  async _runSqlCreateDatabase(databaseName: string): Promise<void> {
+    const stmt = await this._prep(sqlCreateDatabase);
+    await stmt.run({ name: databaseName });
+  }
+
   async connectDatabase(databaseName: string): Promise<ConnectResult> {
     const connectionId = this.connectionIdCounter++;
     const connectionCookie = `connection-${connectionId}`;
 
     // Wait until no transaction is active anymore.
     while (1) {
+      if (this.enableTracing) {
+        console.log(`connectDatabase - txLevel is ${this.txLevel}`);
+      }
       if (this.txLevel == TransactionLevel.None) {
         break;
       }
       await this.transactionDoneCond.wait();
     }
 
-    this._prep(sqlBegin).run();
-    const versionRes = this._prep(sqlGetDatabaseVersion).getFirst({
-      name: databaseName,
-    });
-    let ver: number;
-    if (versionRes == undefined) {
-      this._prep(sqlCreateDatabase).run({ name: databaseName });
+    this.txLevel = TransactionLevel.Write;
+
+    await this._runSqlBegin();
+    let ver = await this._runSqlGetDatabaseVersion(databaseName);
+    if (ver == null) {
+      await this._runSqlCreateDatabase(databaseName);
       ver = 0;
-    } else {
-      const verNum = expectDbNumber(versionRes, "version");
-      assertDbInvariant(typeof verNum === "number");
-      ver = verNum;
     }
-    const objectStoreNames: string[] = this._loadObjectStoreNames(databaseName);
 
-    this._prep(sqlCommit).run();
+    const objectStoreNames: string[] =
+      await this._loadObjectStoreNames(databaseName);
+    await this._runSqlCommit();
 
-    this.connectionMap.set(connectionCookie, {
+    const connInfo = {
       databaseName: databaseName,
-    });
+      storeList: [],
+      storeMap: new Map(),
+    };
+
+    this.connectionMap.set(connectionCookie, connInfo);
+
+    for (const storeName of objectStoreNames) {
+      await this._loadScopeInfo(connInfo, storeName);
+    }
+
+    this.txLevel = TransactionLevel.None;
+    this.transactionDoneCond.trigger();
 
     return {
       conn: {
@@ -1082,8 +1102,11 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  private _loadScopeInfo(connInfo: ConnectionInfo, storeName: string): void {
-    const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+  private async _loadScopeInfo(
+    connInfo: ConnectionInfo,
+    storeName: string,
+  ): Promise<void> {
+    const objRes = await (await this._prep(sqlGetObjectStoreMetaByName)).getFirst({
       name: storeName,
       database_name: connInfo.databaseName,
     });
@@ -1091,13 +1114,18 @@ export class SqliteBackend implements Backend {
       throw Error("object store not found");
     }
     const objectStoreId = expectDbNumber(objRes, "id");
-    const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
+    const objectStoreAutoIncrement = expectDbNumber(objRes, "auto_increment");
+    const objectStoreKeyPath = deserializeKeyPath(
+      expectDbStringOrNull(objRes, "key_path"),
+    );
+    const indexRes = await (await this._prep(sqlGetIndexesByObjectStoreId)).getAll({
       object_store_id: objectStoreId,
     });
     if (!indexRes) {
       throw Error("db inconsistent");
     }
-    const indexMap = new Map<string, ScopeIndexInfo>();
+    const indexList: MyIndexMeta[] = [];
+    const indexMap = new Map<string, MyIndexMeta>();
     for (const idxInfo of indexRes) {
       const indexId = expectDbNumber(idxInfo, "id");
       const indexName = expectDbString(idxInfo, "name");
@@ -1109,17 +1137,28 @@ export class SqliteBackend implements Backend {
       if (!indexKeyPath) {
         throw Error("db inconsistent");
       }
-      indexMap.set(indexName, {
+      const indexMeta: MyIndexMeta = {
         indexId,
         keyPath: indexKeyPath,
         multiEntry: indexMultiEntry != 0,
         unique: indexUnique != 0,
-      });
+        currentName: indexName,
+        nameDirty: false,
+      };
+      indexList.push(indexMeta);
+      indexMap.set(indexName, indexMeta);
     }
-    this.txScope.set(storeName, {
+    const storeMeta: MyStoreMeta = {
       objectStoreId,
       indexMap,
-    });
+      indexList,
+      autoIncrement: objectStoreAutoIncrement != 0,
+      keyPath: objectStoreKeyPath,
+      currentName: storeName,
+      nameDirty: false,
+    };
+    connInfo.storeList.push(storeMeta);
+    connInfo.storeMap.set(storeName, storeMeta);
   }
 
   async beginTransaction(
@@ -1148,29 +1187,20 @@ export class SqliteBackend implements Backend {
       }
     }
 
-    this._prep(sqlBegin).run();
     if (mode === "readonly") {
       this.txLevel = TransactionLevel.Read;
     } else if (mode === "readwrite") {
       this.txLevel = TransactionLevel.Write;
+    } else {
+      throw Error("not supported");
     }
 
+    await this._runSqlBegin();
+
     this.transactionMap.set(transactionCookie, {
       connectionCookie: conn.connectionCookie,
     });
 
-    // FIXME: We should check this
-    // if (this.txScope.size != 0) {
-    //   // Something didn't clean up!
-    //   throw Error("scope not empty");
-    // }
-    this.txScope.clear();
-
-    // FIXME: Use cached info from connection?
-    for (const storeName of objectStores) {
-      this._loadScopeInfo(connInfo, storeName);
-    }
-
     return {
       transactionCookie,
     };
@@ -1198,41 +1228,34 @@ export class SqliteBackend implements Backend {
       await this.transactionDoneCond.wait();
     }
 
-    // FIXME: We should check this
-    // if (this.txScope.size != 0) {
-    //   // Something didn't clean up!
-    //   throw Error("scope not empty");
-    // }
-    this.txScope.clear();
-
     if (this.enableTracing) {
       console.log(`version change transaction unblocked`);
     }
 
-    this._prep(sqlBegin).run();
     this.txLevel = TransactionLevel.VersionChange;
-
     this.transactionMap.set(transactionCookie, {
       connectionCookie: conn.connectionCookie,
     });
 
-    this._prep(sqlUpdateDbVersion).run({
-      name: connInfo.databaseName,
-      version: newVersion,
-    });
-
-    const objectStoreNames = this._loadObjectStoreNames(connInfo.databaseName);
-
-    // FIXME: Use cached info from connection?
-    for (const storeName of objectStoreNames) {
-      this._loadScopeInfo(connInfo, storeName);
-    }
+    await this._runSqlBegin();
+    await this._runSqlUpdateDbVersion(connInfo.databaseName, newVersion);
 
     return {
       transactionCookie,
     };
   }
 
+  async _runSqlUpdateDbVersion(
+    databaseName: string,
+    newVersion: number,
+  ): Promise<void> {
+    const stmt = await this._prep(sqlUpdateDbVersion);
+    await stmt.run({
+      name: databaseName,
+      version: newVersion,
+    });
+  }
+
   async deleteDatabase(databaseName: string): Promise<void> {
     // FIXME: Wait until connection queue is not blocked
     // FIXME: To properly implement the spec semantics, maybe
@@ -1242,10 +1265,14 @@ export class SqliteBackend implements Backend {
       await this.transactionDoneCond.wait();
     }
 
-    this._prep(sqlBegin).run();
-    const objectStoreNames = this._loadObjectStoreNames(databaseName);
+    this.txLevel = TransactionLevel.VersionChange;
+
+    await this._runSqlBegin();
+
+    const objectStoreNames = await this._loadObjectStoreNames(databaseName);
+
     for (const storeName of objectStoreNames) {
-      const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+      const objRes = await (await this._prep(sqlGetObjectStoreMetaByName)).getFirst({
         name: storeName,
         database_name: databaseName,
       });
@@ -1253,13 +1280,13 @@ export class SqliteBackend implements Backend {
         throw Error("object store not found");
       }
       const objectStoreId = expectDbNumber(objRes, "id");
-      const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
+      const indexRes = await (await this._prep(sqlGetIndexesByObjectStoreId)).getAll({
         object_store_id: objectStoreId,
       });
       if (!indexRes) {
         throw Error("db inconsistent");
       }
-      const indexMap = new Map<string, ScopeIndexInfo>();
+      const indexList: MyIndexMeta[] = [];
       for (const idxInfo of indexRes) {
         const indexId = expectDbNumber(idxInfo, "id");
         const indexName = expectDbString(idxInfo, "name");
@@ -1271,43 +1298,45 @@ export class SqliteBackend implements Backend {
         if (!indexKeyPath) {
           throw Error("db inconsistent");
         }
-        indexMap.set(indexName, {
+        const indexMeta: MyIndexMeta = {
           indexId,
           keyPath: indexKeyPath,
           multiEntry: indexMultiEntry != 0,
           unique: indexUnique != 0,
-        });
+          currentName: indexName,
+          nameDirty: false,
+        };
+        indexList.push(indexMeta);
       }
-      this.txScope.set(storeName, {
-        objectStoreId,
-        indexMap,
-      });
 
-      for (const indexInfo of indexMap.values()) {
+      for (const indexInfo of indexList) {
         let stmt: Sqlite3Statement;
         if (indexInfo.unique) {
-          stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
+          stmt = await this._prep(sqlIUniqueIndexDataDeleteAll);
         } else {
-          stmt = this._prep(sqlIndexDataDeleteAll);
+          stmt = await this._prep(sqlIndexDataDeleteAll);
         }
-        stmt.run({
+        await stmt.run({
           index_id: indexInfo.indexId,
         });
-        this._prep(sqlIndexDelete).run({
+        await (await this._prep(sqlIndexDelete)).run({
           index_id: indexInfo.indexId,
         });
       }
-      this._prep(sqlObjectDataDeleteAll).run({
+      await (await this._prep(sqlObjectDataDeleteAll)).run({
         object_store_id: objectStoreId,
       });
-      this._prep(sqlObjectStoreDelete).run({
+      await (await this._prep(sqlObjectStoreDelete)).run({
         object_store_id: objectStoreId,
       });
     }
-    this._prep(sqlDeleteDatabase).run({
+    await (await this._prep(sqlDeleteDatabase)).run({
       name: databaseName,
     });
-    this._prep(sqlCommit).run();
+    await (await this._prep(sqlCommit)).run();
+
+    this.txLevel = TransactionLevel.None;
+    this.transactionDoneCond.trigger();
   }
 
   async close(db: DatabaseConnection): Promise<void> {
@@ -1315,10 +1344,13 @@ export class SqliteBackend implements Backend {
     if (!connInfo) {
       throw Error("connection not found");
     }
-    // FIXME: What if we're in a transaction? Does the backend interface allow this?
-    // if (this.txLevel !== TransactionLevel.None) {
-    //   throw Error("can't close while in transaction");
-    // }
+    // Wait until no transaction is active anymore.
+    while (1) {
+      if (this.txLevel == TransactionLevel.None) {
+        break;
+      }
+      await this.transactionDoneCond.wait();
+    }
     if (this.enableTracing) {
       console.log(`closing connection ${db.connectionCookie}`);
     }
@@ -1342,16 +1374,14 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(oldName);
-    if (!scopeInfo) {
+    const storeMeta = connInfo.storeMap.get(oldName);
+    if (!storeMeta) {
       throw Error("object store not found");
     }
-    this.txScope.delete(oldName);
-    this.txScope.set(newName, scopeInfo);
-    this._prep(sqlRenameObjectStore).run({
-      object_store_id: scopeInfo.objectStoreId,
-      name: newName,
-    });
+    connInfo.storeMap.delete(oldName);
+    connInfo.storeMap.set(newName, storeMeta);
+    storeMeta.currentName = newName;
+    storeMeta.nameDirty = true;
   }
 
   renameIndex(
@@ -1369,7 +1399,7 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error("object store not found");
     }
@@ -1380,9 +1410,30 @@ export class SqliteBackend implements Backend {
     // FIXME: Would also be much nicer with numeric UID handles
     scopeInfo.indexMap.delete(oldIndexName);
     scopeInfo.indexMap.set(newIndexName, indexInfo);
-    this._prep(sqlRenameIndex).run({
-      index_id: indexInfo.indexId,
-      name: newIndexName,
+    scopeInfo.nameDirty = true;
+    scopeInfo.currentName = newIndexName;
+  }
+
+  async _doDeleteObjectStore(scopeInfo: MyStoreMeta): Promise<void> {
+    for (const indexInfo of scopeInfo.indexMap.values()) {
+      let stmt: Sqlite3Statement;
+      if (indexInfo.unique) {
+        stmt = await this._prep(sqlIUniqueIndexDataDeleteAll);
+      } else {
+        stmt = await this._prep(sqlIndexDataDeleteAll);
+      }
+      await stmt.run({
+        index_id: indexInfo.indexId,
+      });
+      await (await this._prep(sqlIndexDelete)).run({
+        index_id: indexInfo.indexId,
+      });
+    }
+    await (await this._prep(sqlObjectDataDeleteAll)).run({
+      object_store_id: scopeInfo.objectStoreId,
+    });
+    await (await this._prep(sqlObjectStoreDelete)).run({
+      object_store_id: scopeInfo.objectStoreId,
     });
   }
 
@@ -1396,31 +1447,31 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(name);
+    const scopeInfo = connInfo.storeMap.get(name);
     if (!scopeInfo) {
       throw Error("object store not found");
     }
-    for (const indexInfo of scopeInfo.indexMap.values()) {
-      let stmt: Sqlite3Statement;
-      if (indexInfo.unique) {
-        stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
-      } else {
-        stmt = this._prep(sqlIndexDataDeleteAll);
-      }
-      stmt.run({
-        index_id: indexInfo.indexId,
-      });
-      this._prep(sqlIndexDelete).run({
-        index_id: indexInfo.indexId,
-      });
+    const storeMeta = connInfo.storeMap.get(name);
+    if (!storeMeta) {
+      throw Error("object store does not exist");
     }
-    this._prep(sqlObjectDataDeleteAll).run({
-      object_store_id: scopeInfo.objectStoreId,
+    connInfo.storeMap.delete(name);
+    storeMeta.currentName = undefined;
+  }
+
+  async _doDeleteIndex(indexMeta: MyIndexMeta): Promise<void> {
+    let stmt: Sqlite3Statement;
+    if (indexMeta.unique) {
+      stmt = await this._prep(sqlIUniqueIndexDataDeleteAll);
+    } else {
+      stmt = await this._prep(sqlIndexDataDeleteAll);
+    }
+    await stmt.run({
+      index_id: indexMeta.indexId,
     });
-    this._prep(sqlObjectStoreDelete).run({
-      object_store_id: scopeInfo.objectStoreId,
+    await (await this._prep(sqlIndexDelete)).run({
+      index_id: indexMeta.indexId,
     });
-    this.txScope.delete(name);
   }
 
   deleteIndex(
@@ -1437,27 +1488,16 @@ export class SqliteBackend implements Backend {
       throw Error("not connected");
     }
     // FIXME: Would be much nicer with numeric UID handles
-    const scopeInfo = this.txScope.get(objectStoreName);
-    if (!scopeInfo) {
+    const storeMeta = connInfo.storeMap.get(objectStoreName);
+    if (!storeMeta) {
       throw Error("object store not found");
     }
-    const indexInfo = scopeInfo.indexMap.get(indexName);
+    const indexInfo = storeMeta.indexMap.get(indexName);
     if (!indexInfo) {
       throw Error("index not found");
     }
-    scopeInfo.indexMap.delete(indexName);
-    let stmt: Sqlite3Statement;
-    if (indexInfo.unique) {
-      stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
-    } else {
-      stmt = this._prep(sqlIndexDataDeleteAll);
-    }
-    stmt.run({
-      index_id: indexInfo.indexId,
-    });
-    this._prep(sqlIndexDelete).run({
-      index_id: indexInfo.indexId,
-    });
+    storeMeta.indexMap.delete(indexName);
+    indexInfo.currentName = undefined;
   }
 
   async rollback(btx: DatabaseTransaction): Promise<void> {
@@ -1468,13 +1508,24 @@ export class SqliteBackend implements Backend {
     if (this.enableTracing) {
       console.log(`rolling back transaction ${btx.transactionCookie}`);
     }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("not connected");
+    }
     if (this.txLevel === TransactionLevel.None) {
       return;
     }
-    this._prep(sqlRollback).run();
+    await (await this._prep(sqlRollback)).run();
+    connInfo.storeList = [];
+    connInfo.storeMap.clear();
+    const objectStoreNames: string[] = await this._loadObjectStoreNames(
+      connInfo.databaseName,
+    );
+    for (const storeName of objectStoreNames) {
+      await this._loadScopeInfo(connInfo, storeName);
+    }
     this.txLevel = TransactionLevel.None;
     this.transactionMap.delete(btx.transactionCookie);
-    this.txScope.clear();
     this.transactionDoneCond.trigger();
   }
 
@@ -1483,19 +1534,88 @@ export class SqliteBackend implements Backend {
     if (!txInfo) {
       throw Error("transaction not found");
     }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("not connected");
+    }
     if (this.enableTracing) {
       console.log(`committing transaction ${btx.transactionCookie}`);
     }
     if (this.txLevel === TransactionLevel.None) {
       return;
     }
-    this._prep(sqlCommit).run();
+    if (this.txLevel === TransactionLevel.VersionChange) {
+      for (const store of connInfo.storeList) {
+        if (store.currentName == null) {
+          await this._doDeleteObjectStore(store);
+          continue;
+        }
+        if (store.objectStoreId == null) {
+          await this._provideObjectStore(connInfo, store);
+        }
+        if (store.nameDirty) {
+          await (await this._prep(sqlRenameObjectStore)).run({
+            object_store_id: store.objectStoreId,
+            name: store.currentName,
+          });
+        }
+        for (const indexMeta of store.indexList) {
+          if (indexMeta.currentName == null) {
+            await this._doDeleteIndex(indexMeta);
+            continue;
+          }
+          if (indexMeta.indexId == null) {
+            await this._provideIndex(connInfo, store, indexMeta);
+          }
+          if (indexMeta.nameDirty) {
+            await (await this._prep(sqlRenameIndex)).run({
+              index_id: indexMeta.indexId,
+              name: indexMeta.currentName,
+            });
+            indexMeta.nameDirty = false;
+          }
+        }
+      }
+    }
+    await (await this._prep(sqlCommit)).run();
     this.txLevel = TransactionLevel.None;
-    this.txScope.clear();
     this.transactionMap.delete(btx.transactionCookie);
     this.transactionDoneCond.trigger();
   }
 
+  async _provideObjectStore(
+    connInfo: ConnectionInfo,
+    storeMeta: MyStoreMeta,
+  ): Promise<SqliteRowid> {
+    if (storeMeta.objectStoreId != null) {
+      return storeMeta.objectStoreId;
+    }
+    if (!storeMeta.currentName) {
+      throw Error("invalid state");
+    }
+
+    const runRes = await (await this._prep(sqlCreateObjectStore)).run({
+      name: storeMeta.currentName,
+      key_path: serializeKeyPath(storeMeta.keyPath),
+      auto_increment: storeMeta.autoIncrement ? 1 : 0,
+      database_name: connInfo.databaseName,
+    });
+
+    storeMeta.objectStoreId = runRes.lastInsertRowid;
+
+    for (const indexMeta of storeMeta.indexList) {
+      if (indexMeta.currentName == null) {
+        continue;
+      }
+      if (indexMeta.indexId != null) {
+        throw Error("invariant violated");
+      }
+      await this._provideIndex(connInfo, storeMeta, indexMeta);
+    }
+
+    return runRes.lastInsertRowid;
+  }
+
   createObjectStore(
     btx: DatabaseTransaction,
     name: string,
@@ -1513,20 +1633,70 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.VersionChange) {
       throw Error("only allowed in versionchange transaction");
     }
-    if (this.txScope.has(name)) {
+    if (connInfo.storeMap.has(name)) {
       throw Error("object store already exists");
     }
-    let myKeyPath = serializeKeyPath(keyPath);
-    const runRes = this._prep(sqlCreateObjectStore).run({
-      name,
-      key_path: myKeyPath,
-      auto_increment: autoIncrement ? 1 : 0,
-      database_name: connInfo.databaseName,
-    });
-    this.txScope.set(name, {
-      objectStoreId: runRes.lastInsertRowid,
+    const storeMeta: MyStoreMeta = {
+      objectStoreId: undefined,
       indexMap: new Map(),
+      indexList: [],
+      // Normalize
+      keyPath: deserializeKeyPath(serializeKeyPath(keyPath)),
+      autoIncrement: autoIncrement,
+      currentName: name,
+      nameDirty: false,
+    };
+    connInfo.storeList.push(storeMeta);
+    connInfo.storeMap.set(name, storeMeta);
+  }
+
+  async _provideIndex(
+    connInfo: ConnectionInfo,
+    storeMeta: MyStoreMeta,
+    indexMeta: MyIndexMeta,
+  ) {
+    if (indexMeta.indexId != null) {
+      return indexMeta.indexId;
+    }
+    if (storeMeta.objectStoreId == null) {
+      throw Error("invariant failed");
+    }
+    const res = await (await this._prep(sqlCreateIndex)).run({
+      object_store_id: storeMeta.objectStoreId,
+      name: indexMeta.currentName,
+      key_path: serializeKeyPath(indexMeta.keyPath),
+      unique: indexMeta.unique ? 1 : 0,
+      multientry: indexMeta.multiEntry ? 1 : 0,
     });
+    indexMeta.indexId = res.lastInsertRowid;
+    // FIXME: We can't use an iterator here, as it's not allowed to
+    // execute a write statement while the iterator executes.
+    // Maybe do multiple selects instead of loading everything into memory?
+    const keyRowsRes = await (await this._prep(sqlObjectDataGetAll)).getAll({
+      object_store_id: storeMeta.objectStoreId,
+    });
+
+    for (const keyRow of keyRowsRes) {
+      assertDbInvariant(typeof keyRow === "object" && keyRow != null);
+      assertDbInvariant("key" in keyRow);
+      assertDbInvariant("value" in keyRow);
+      assertDbInvariant(typeof keyRow.value === "string");
+      const key = keyRow.key;
+      const value = structuredRevive(JSON.parse(keyRow.value));
+      assertDbInvariant(key instanceof Uint8Array);
+      try {
+        await this.insertIntoIndex(indexMeta, key, value);
+      } catch (e) {
+        // FIXME: Catch this in insertIntoIndex!
+        if (e instanceof DataError) {
+          // https://www.w3.org/TR/IndexedDB-2/#object-store-storage-operation
+          // Do nothing
+        } else {
+          throw e;
+        }
+      }
+    }
+    return res.lastInsertRowid;
   }
 
   createIndex(
@@ -1548,7 +1718,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.VersionChange) {
       throw Error("only allowed in versionchange transaction");
     }
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error("object store does not exist, can't create index");
     }
@@ -1560,48 +1730,16 @@ export class SqliteBackend implements Backend {
       console.log(`creating index "${indexName}"`);
     }
 
-    const res = this._prep(sqlCreateIndex).run({
-      object_store_id: scopeInfo.objectStoreId,
-      name: indexName,
-      key_path: serializeKeyPath(keyPath),
-      unique: unique ? 1 : 0,
-      multientry: multiEntry ? 1 : 0,
-    });
-    const scopeIndexInfo: ScopeIndexInfo = {
-      indexId: res.lastInsertRowid,
+    const scopeIndexInfo: MyIndexMeta = {
+      indexId: undefined,
       keyPath,
       multiEntry,
       unique,
+      currentName: indexName,
+      nameDirty: false,
     };
+    scopeInfo.indexList.push(scopeIndexInfo);
     scopeInfo.indexMap.set(indexName, scopeIndexInfo);
-
-    // FIXME: We can't use an iterator here, as it's not allowed to
-    // execute a write statement while the iterator executes.
-    // Maybe do multiple selects instead of loading everything into memory?
-    const keyRowsRes = this._prep(sqlObjectDataGetAll).getAll({
-      object_store_id: scopeInfo.objectStoreId,
-    });
-
-    for (const keyRow of keyRowsRes) {
-      assertDbInvariant(typeof keyRow === "object" && keyRow != null);
-      assertDbInvariant("key" in keyRow);
-      assertDbInvariant("value" in keyRow);
-      assertDbInvariant(typeof keyRow.value === "string");
-      const key = keyRow.key;
-      const value = structuredRevive(JSON.parse(keyRow.value));
-      assertDbInvariant(key instanceof Uint8Array);
-      try {
-        this.insertIntoIndex(scopeIndexInfo, key, value);
-      } catch (e) {
-        // FIXME: Catch this in insertIntoIndex!
-        if (e instanceof DataError) {
-          // https://www.w3.org/TR/IndexedDB-2/#object-store-storage-operation
-          // Do nothing
-        } else {
-          throw e;
-        }
-      }
-    }
   }
 
   async deleteRecord(
@@ -1620,7 +1758,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Write) {
       throw Error("store operation only allowed while running a transaction");
     }
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -1629,6 +1767,8 @@ export class SqliteBackend implements Backend {
       );
     }
 
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+
     // PERF: We delete keys one-by-one here.
     // Instead, we could do it with a single
     // delete query for the object data / index data.
@@ -1637,15 +1777,15 @@ export class SqliteBackend implements Backend {
 
     if (range.lower != null) {
       const targetKey = serializeKey(range.lower);
-      currKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         currentKey: null,
         forward: true,
         inclusive: true,
         targetKey,
       });
     } else {
-      currKey = this._startObjectKey(scopeInfo.objectStoreId, true);
+      currKey = await this._startObjectKey(objectStoreId, true);
     }
 
     let upperBound: Uint8Array | undefined;
@@ -1672,7 +1812,7 @@ export class SqliteBackend implements Backend {
 
       // Now delete!
 
-      this._prep(sqlObjectDataDeleteKey).run({
+      await (await this._prep(sqlObjectDataDeleteKey)).run({
         object_store_id: scopeInfo.objectStoreId,
         key: currKey,
       });
@@ -1680,18 +1820,18 @@ export class SqliteBackend implements Backend {
       for (const index of scopeInfo.indexMap.values()) {
         let stmt: Sqlite3Statement;
         if (index.unique) {
-          stmt = this._prep(sqlUniqueIndexDataDeleteKey);
+          stmt = await this._prep(sqlUniqueIndexDataDeleteKey);
         } else {
-          stmt = this._prep(sqlIndexDataDeleteKey);
+          stmt = await this._prep(sqlIndexDataDeleteKey);
         }
-        stmt.run({
+        await stmt.run({
           index_id: index.indexId,
           object_key: currKey,
         });
       }
 
-      currKey = this._continueObjectKey({
-        objectStoreId: scopeInfo.objectStoreId,
+      currKey = await this._continueObjectKey({
+        objectStoreId: objectStoreId,
         currentKey: null,
         forward: true,
         inclusive: false,
@@ -1715,7 +1855,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Write) {
       throw Error("store operation only allowed while running a transaction");
     }
-    const scopeInfo = this.txScope.get(storeReq.objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(storeReq.objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -1723,8 +1863,9 @@ export class SqliteBackend implements Backend {
         )} not in transaction scope`,
       );
     }
-    const metaRes = this._prep(sqlGetObjectStoreMetaById).getFirst({
-      id: scopeInfo.objectStoreId,
+    const objectStoreId = await this._provideObjectStore(connInfo, scopeInfo);
+    const metaRes = await (await this._prep(sqlGetObjectStoreMetaById)).getFirst({
+      id: objectStoreId satisfies SqliteRowid,
     });
     if (metaRes === undefined) {
       throw Error(
@@ -1776,8 +1917,8 @@ export class SqliteBackend implements Backend {
 
     const serializedObjectKey = serializeKey(key);
 
-    const existingObj = this._getObjectValue(
-      scopeInfo.objectStoreId,
+    const existingObj = await this._getObjectValue(
+      objectStoreId,
       serializedObjectKey,
     );
 
@@ -1787,30 +1928,31 @@ export class SqliteBackend implements Backend {
       }
     }
 
-    this._prep(sqlInsertObjectData).run({
-      object_store_id: scopeInfo.objectStoreId,
+    await (await this._prep(sqlInsertObjectData)).run({
+      object_store_id: objectStoreId,
       key: serializedObjectKey,
       value: JSON.stringify(structuredEncapsulate(value)),
     });
 
     if (autoIncrement != 0) {
-      this._prep(sqlUpdateAutoIncrement).run({
-        object_store_id: scopeInfo.objectStoreId,
+      await (await this._prep(sqlUpdateAutoIncrement)).run({
+        object_store_id: objectStoreId,
         auto_increment: updatedKeyGenerator,
       });
     }
 
     for (const [k, indexInfo] of scopeInfo.indexMap.entries()) {
+      const indexId = await this._provideIndex(connInfo, scopeInfo, indexInfo);
       if (existingObj) {
-        this.deleteFromIndex(
-          indexInfo.indexId,
+        await this.deleteFromIndex(
+          indexId,
           indexInfo.unique,
           serializedObjectKey,
         );
       }
 
       try {
-        this.insertIntoIndex(indexInfo, serializedObjectKey, value);
+        await this.insertIntoIndex(indexInfo, serializedObjectKey, value);
       } catch (e) {
         // FIXME: handle this in insertIntoIndex!
         if (e instanceof DataError) {
@@ -1831,28 +1973,28 @@ export class SqliteBackend implements Backend {
     };
   }
 
-  private deleteFromIndex(
+  private async deleteFromIndex(
     indexId: SqliteRowid,
     indexUnique: boolean,
     objectKey: Uint8Array,
-  ): void {
+  ): Promise<void> {
     let stmt: Sqlite3Statement;
     if (indexUnique) {
-      stmt = this._prep(sqlUniqueIndexDataDeleteKey);
+      stmt = await this._prep(sqlUniqueIndexDataDeleteKey);
     } else {
-      stmt = this._prep(sqlIndexDataDeleteKey);
+      stmt = await this._prep(sqlIndexDataDeleteKey);
     }
-    stmt.run({
+    await stmt.run({
       index_id: indexId,
       object_key: objectKey,
     });
   }
 
-  private insertIntoIndex(
-    indexInfo: ScopeIndexInfo,
+  private async insertIntoIndex(
+    indexInfo: MyIndexMeta,
     primaryKey: Uint8Array,
     value: any,
-  ): void {
+  ): Promise<void> {
     const indexKeys = getIndexKeys(
       value,
       indexInfo.keyPath,
@@ -1864,16 +2006,16 @@ export class SqliteBackend implements Backend {
 
     let stmt;
     if (indexInfo.unique) {
-      stmt = this._prep(sqlInsertUniqueIndexData);
+      stmt = await this._prep(sqlInsertUniqueIndexData);
     } else {
-      stmt = this._prep(sqlInsertIndexData);
+      stmt = await this._prep(sqlInsertIndexData);
     }
 
     for (const indexKey of indexKeys) {
       // FIXME: Re-throw correct error for unique index violations
       const serializedIndexKey = serializeKey(indexKey);
       try {
-        stmt.run({
+        await stmt.run({
           index_id: indexInfo.indexId,
           object_key: primaryKey,
           index_key: serializedIndexKey,
@@ -1902,7 +2044,7 @@ export class SqliteBackend implements Backend {
     if (this.txLevel < TransactionLevel.Write) {
       throw Error("store operation only allowed while running a transaction");
     }
-    const scopeInfo = this.txScope.get(objectStoreName);
+    const scopeInfo = connInfo.storeMap.get(objectStoreName);
     if (!scopeInfo) {
       throw Error(
         `object store ${JSON.stringify(
@@ -1911,32 +2053,33 @@ export class SqliteBackend implements Backend {
       );
     }
 
-    this._prep(sqlClearObjectStore).run({
+    await (await this._prep(sqlClearObjectStore)).run({
       object_store_id: scopeInfo.objectStoreId,
     });
 
     for (const index of scopeInfo.indexMap.values()) {
       let stmt: Sqlite3Statement;
       if (index.unique) {
-        stmt = this._prep(sqlClearUniqueIndexData);
+        stmt = await this._prep(sqlClearUniqueIndexData);
       } else {
-        stmt = this._prep(sqlClearIndexData);
+        stmt = await this._prep(sqlClearIndexData);
       }
-      stmt.run({
+      await stmt.run({
         index_id: index.indexId,
       });
     }
   }
 
   async backupToFile(path: string): Promise<void> {
-    const stmt = this._prep("VACUUM INTO $filename;");
-    stmt.run({
+    const stmt = await this._prep("VACUUM INTO $filename;");
+    await stmt.run({
       filename: path,
     });
   }
 }
 
 const schemaSql = `
+BEGIN;
 CREATE TABLE IF NOT EXISTS databases
 ( name TEXT PRIMARY KEY
 , version INTEGER NOT NULL
@@ -1987,6 +2130,7 @@ CREATE TABLE IF NOT EXISTS unique_index_data
 , FOREIGN KEY (index_id)
     REFERENCES indexes(id)
 );
+COMMIT;
 `;
 
 const sqlClearObjectStore = `
@@ -2329,8 +2473,8 @@ export async function createSqliteBackend(
   sqliteImpl: Sqlite3Interface,
   options: SqliteBackendOptions,
 ): Promise<SqliteBackend> {
-  const db = sqliteImpl.open(options.filename);
-  db.exec("PRAGMA foreign_keys = ON;");
-  db.exec(schemaSql);
+  const db = await sqliteImpl.open(options.filename);
+  await db.exec("PRAGMA foreign_keys = ON;");
+  await db.exec(schemaSql);
   return new SqliteBackend(sqliteImpl, db);
 }
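
For reference, a minimal sketch of how the now-asynchronous backend factory is consumed from Node (mirroring bench.ts and testingdb.ts further down; the BridgeIDBFactory constructor wiring is an assumption, it is not shown in this diff):

  import { BridgeIDBFactory, createSqliteBackend } from "@gnu-taler/idb-bridge";
  import { createNodeHelperSqlite3Impl } from "@gnu-taler/idb-bridge/node-helper-sqlite3-impl";

  async function makeFactory(): Promise<BridgeIDBFactory> {
    // Spawns the taler-helper-sqlite3 subprocess and talks to it over stdio.
    const sqlite3Impl = await createNodeHelperSqlite3Impl();
    // open() is async now, so the backend factory must be awaited as well.
    const backend = await createSqliteBackend(sqlite3Impl, { filename: ":memory:" });
    return new BridgeIDBFactory(backend);
  }
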
diff --git a/packages/idb-bridge/src/backend-interface.ts b/packages/idb-bridge/src/backend-interface.ts
index 690f92f54..201e1aea7 100644
--- a/packages/idb-bridge/src/backend-interface.ts
+++ b/packages/idb-bridge/src/backend-interface.ts
@@ -213,7 +213,7 @@ export interface Backend {
     indexName: string,
   ): void;
 
-  rollback(btx: DatabaseTransaction): void;
+  rollback(btx: DatabaseTransaction): Promise<void>;
 
   // FIXME: Should probably not be async
   commit(btx: DatabaseTransaction): Promise<void>;
diff --git a/packages/idb-bridge/src/bench.ts b/packages/idb-bridge/src/bench.ts
index d196bacb1..f573c0eb8 100644
--- a/packages/idb-bridge/src/bench.ts
+++ b/packages/idb-bridge/src/bench.ts
@@ -22,7 +22,7 @@ import {
   BridgeIDBTransaction,
   createSqliteBackend,
 } from "./index.js";
-import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+import { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
 
 function openDb(idbFactory: BridgeIDBFactory): Promise<BridgeIDBDatabase> {
   return new Promise((resolve, reject) => {
@@ -82,7 +82,7 @@ async function main() {
 
   console.log(`doing ${nTx} iterations of ${nInsert} items`);
 
-  const sqlite3Impl = await createNodeSqlite3Impl();
+  const sqlite3Impl = await createNodeHelperSqlite3Impl();
   const backend = await createSqliteBackend(sqlite3Impl, {
     filename,
   });
diff --git a/packages/idb-bridge/src/bridge-idb.ts b/packages/idb-bridge/src/bridge-idb.ts
index 15c68c733..6b710ea71 100644
--- a/packages/idb-bridge/src/bridge-idb.ts
+++ b/packages/idb-bridge/src/bridge-idb.ts
@@ -622,7 +622,10 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
 
     if (transactionsComplete) {
       this._closed = true;
-      this._backend.close(this._backendConnection);
+      this._backend.close(this._backendConnection).catch((e) => {
+        console.error("Error while closing connection.");
+        console.error(e);
+      });
     } else {
       queueTask(() => {
         this._closeConnection();
@@ -871,9 +874,16 @@ export class BridgeIDBFactory {
       throw new TypeError();
     }
 
+    if (BridgeIDBFactory.enableTracing) {
+      console.log(`opening DB ${name} at version ${version}`);
+    }
+
     const request = new BridgeIDBOpenDBRequest();
 
     queueTask(async () => {
+      if (BridgeIDBFactory.enableTracing) {
+        console.log(`running task to open DB ${name} at version ${version}`);
+      }
       let dbConnRes: ConnectResult;
       try {
         if (BridgeIDBFactory.enableTracing) {
@@ -901,13 +911,25 @@ export class BridgeIDBFactory {
 
       const requestedVersion = version;
 
+      if (BridgeIDBFactory.enableTracing) {
+        console.log(`existing version of DB ${name} is ${existingVersion}, requesting ${requestedVersion}`);
+      }
+
       BridgeIDBFactory.enableTracing &&
         console.log(
           `TRACE: existing version ${existingVersion}, requested version ${requestedVersion}`,
         );
 
       if (existingVersion > requestedVersion) {
-        request._finishWithError(new VersionError());
+        this.backend.close(dbConnRes.conn).catch((e) => {
+          console.error("failed to close database");
+          console.error(e);
+        });
+        request._finishWithError(
+          new VersionError(
+            `requested version ${requestedVersion}, existing version is ${existingVersion}`,
+          ),
+        );
         return;
       }
 
@@ -935,7 +957,7 @@ export class BridgeIDBFactory {
           if (otherConn._name != db._name) {
             continue;
           }
-          if (otherConn._closePending) {
+          if (otherConn._closePending || otherConn._closed) {
             continue;
           }
           if (BridgeIDBFactory.enableTracing) {
@@ -950,7 +972,7 @@ export class BridgeIDBFactory {
           otherConn.dispatchEvent(event);
         }
 
-        if (this._anyOpen()) {
+        if (this._anyOpen(name)) {
           if (BridgeIDBFactory.enableTracing) {
             console.log(
               "other connections are still open, dispatching 'blocked' event 
to other connection",
@@ -1057,8 +1079,19 @@ export class BridgeIDBFactory {
     return "[object IDBFactory]";
   }
 
-  private _anyOpen(): boolean {
-    return this.connections.some((c) => !c._closed && !c._closePending);
+  private _anyOpen(dbName: string): boolean {
+    let numOpen = 0;
+    for (const conn of this.connections) {
+      if (conn._name == dbName && !conn._closed && !conn._closePending) {
+        numOpen++;
+      }
+    }
+    if (numOpen > 0) {
+      if (BridgeIDBFactory.enableTracing) {
+        console.log(`there are ${numOpen} connections still open to ${dbName}`);
+      }
+    }
+    return numOpen > 0;
   }
 }
 
@@ -1164,7 +1197,7 @@ export class BridgeIDBIndex implements IDBIndex {
     indexSet.splice(indexIdx, 1);
   }
 
-  _abort() {
+  _abort(): void {
     if (this._originalName != null) {
       this._applyNameChange(this._name, this._originalName);
     }
@@ -2374,15 +2407,6 @@ export class BridgeIDBRequest extends FakeEventTarget implements IDBRequest {
 
     this.dispatchEvent(event);
   }
-
-  _finishWithResult(result: any) {
-    this.result = result;
-    this.readyState = "done";
-
-    const event = new FakeEvent("success");
-    event.eventPath = [];
-    this.dispatchEvent(event);
-  }
 }
 
 export class BridgeIDBOpenDBRequest
@@ -2541,7 +2565,7 @@ export class BridgeIDBTransaction
   }
 
   // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#dfn-steps-for-aborting-a-transaction
-  async _abort(errName: string | null) {
+  _abort(errName: string | null): void {
     if (BridgeIDBFactory.enableTracing) {
       console.log("TRACE: aborting transaction");
     }
@@ -2589,7 +2613,9 @@ export class BridgeIDBTransaction
 
     const maybeBtx = this._backendTransaction;
     if (maybeBtx) {
-      this._backend.rollback(maybeBtx);
+      this._backend.rollback(maybeBtx).catch((e) => {
+        console.warn(`error during rollback: ${e}`);
+      });
     }
 
     // "Any object stores and indexes which were created during the
@@ -2744,7 +2770,10 @@ export class BridgeIDBTransaction
           // Probably there is a more elegant way to do this by aborting the
           // beginTransaction call when the transaction was aborted.
           // That would require changing the DB backend API.
-          this._backend.rollback(newBackendTx);
+          this._backend.rollback(newBackendTx).catch((e) => {
+            console.error("error during rollback");
+            console.error(e);
+          });
         } else {
           this._backendTransaction = newBackendTx;
         }
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
index b8046fc1b..8782d4891 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
@@ -140,6 +140,7 @@ test("WPT idbfactory-open7.htm", async (t) => {
 
     open_rq.onupgradeneeded = function () {};
     open_rq.onsuccess = function (e: any) {
+      console.log("open7 - opening higher version DB");
       var db = e.target.result;
       db.close();
 
@@ -152,8 +153,12 @@ test("WPT idbfactory-open7.htm", async (t) => {
     };
 
     function open_current_db(e: any) {
-      var open_rq3 = indexedDB.open(e.target.result.name);
+      console.log("open7 - opening current DB");
+      const name = e.target.result.name;
+      console.log(`open7 - name is ${name}`)
+      var open_rq3 = indexedDB.open(name);
       open_rq3.onsuccess = function (e: any) {
+        console.log("open7 - success opening current DB");
         t.deepEqual(e.target.result.version, 14, "db.version");
         open_rq2.result.close();
         open_rq3.result.close();
@@ -278,6 +283,8 @@ test("WPT idbfactory-open9.htm", async (t) => {
   await should_work(1.5, 1);
   await should_work(Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER); // 0x20000000000000 - 1
   await should_work(undefined, 1);
+
+  console.error("test at end");
 });
 
 // IDBFactory.open() - error in version change transaction aborts open
diff --git a/packages/idb-bridge/src/index.ts b/packages/idb-bridge/src/index.ts
index 47ff80119..1d0751c4e 100644
--- a/packages/idb-bridge/src/index.ts
+++ b/packages/idb-bridge/src/index.ts
@@ -32,8 +32,8 @@ import {
 } from "./MemoryBackend.js";
 import { Listener } from "./util/FakeEventTarget.js";
 
-export * from "./SqliteBackend.js";
 export * from "./sqlite3-interface.js";
+export * from "./SqliteBackend.js";
 
 export * from "./idbtypes.js";
 export { MemoryBackend } from "./MemoryBackend.js";
@@ -49,25 +49,25 @@ export {
   BridgeIDBOpenDBRequest,
   BridgeIDBRequest,
   BridgeIDBTransaction,
-  StoreLevel,
   ResultLevel,
+  StoreLevel,
 };
 export type {
-  DatabaseTransaction,
-  RecordGetResponse,
   Backend,
-  DatabaseList,
-  RecordStoreRequest,
-  RecordStoreResponse,
   DatabaseConnection,
-  RequestObj,
   DatabaseDump,
-  ObjectStoreDump,
-  IndexRecord,
-  ObjectStoreRecord,
-  MemoryBackendDump,
+  DatabaseList,
+  DatabaseTransaction,
   Event,
+  IndexRecord,
   Listener,
+  MemoryBackendDump,
+  ObjectStoreDump,
+  ObjectStoreRecord,
+  RecordGetResponse,
+  RecordStoreRequest,
+  RecordStoreResponse,
+  RequestObj,
 };
 
 // globalThis polyfill, see https://mathiasbynens.be/notes/globalthis
diff --git a/packages/idb-bridge/src/node-helper-sqlite3-impl.test.ts b/packages/idb-bridge/src/node-helper-sqlite3-impl.test.ts
new file mode 100644
index 000000000..c1cdca3a3
--- /dev/null
+++ b/packages/idb-bridge/src/node-helper-sqlite3-impl.test.ts
@@ -0,0 +1,82 @@
+/*
+ Copyright 2019 Florian Dold
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ or implied. See the License for the specific language governing
+ permissions and limitations under the License.
+ */
+
+import test from "ava";
+import * as fs from "node:fs";
+import { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
+
+test("sqlite3 helper", async (t) => {
+  const filename = "mytestdb.sqlite3";
+  try {
+    fs.unlinkSync(filename);
+  } catch (e) {
+    // Do nothing.
+  }
+  const impl = await createNodeHelperSqlite3Impl();
+
+  const db = await impl.open(filename);
+
+  await db.exec(`CREATE TABLE person(firstname, lastname, age);
+    CREATE TABLE book(title, author, published);
+    CREATE TABLE publisher(name, address);`);
+
+  const stmt1 = await db.prepare(
+    "INSERT INTO book (title, author, published) VALUES ($title, $author, 
$published)",
+  );
+
+  const stmt2 = await db.prepare(
+    "SELECT author, title, NULL as foobar, 42, published FROM book WHERE 
published=$published ORDER BY author",
+  );
+
+  const stmtBegin = await db.prepare("BEGIN");
+
+  const stmtCommit = await db.prepare("COMMIT");
+
+  await stmtBegin.run();
+
+  const r1 = await stmt1.run({
+    title: "foo",
+    author: "bar",
+    published: 1995,
+  });
+
+  const r2 = await stmt1.run({
+    title: "foo2",
+    author: "bar2",
+    published: 1998,
+  });
+
+  await stmt1.run({
+    title: "foo4",
+    author: "bar4",
+    published: 1995,
+  });
+
+  t.deepEqual(r1.lastInsertRowid, 1n);
+  t.deepEqual(r2.lastInsertRowid, 2n);
+
+  const r3 = await stmtCommit.run();
+
+  const getRes1 = await stmt2.getAll({
+    published: 1995,
+  });
+
+  t.deepEqual(getRes1.length, 2);
+  t.deepEqual(getRes1[0].title, "foo");
+  t.deepEqual(getRes1[1].title, "foo4");
+
+  t.pass();
+});
diff --git a/packages/idb-bridge/src/node-helper-sqlite3-impl.ts b/packages/idb-bridge/src/node-helper-sqlite3-impl.ts
new file mode 100644
index 000000000..3ba37576b
--- /dev/null
+++ b/packages/idb-bridge/src/node-helper-sqlite3-impl.ts
@@ -0,0 +1,572 @@
+/*
+ This file is part of GNU Taler
+ (C) 2024 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+ */
+
+import stream from "node:stream";
+import {
+  BindParams,
+  ResultRow,
+  RunResult,
+  Sqlite3Database,
+  Sqlite3Interface,
+  Sqlite3Statement,
+} from "./sqlite3-interface.js";
+
+import child_process, { ChildProcessByStdio } from "node:child_process";
+import { openPromise } from "./util/openPromise.js";
+
+enum HelperCmd {
+  HELLO = 1,
+  SHUTDOWN = 2,
+  OPEN = 3,
+  CLOSE = 4,
+  PREPARE = 5,
+  STMT_GET_ALL = 6,
+  STMT_GET_FIRST = 7,
+  STMT_RUN = 8,
+  EXEC = 9,
+}
+
+enum HelperResp {
+  OK = 1,
+  FAIL = 2,
+  ROWLIST = 3,
+  RUNRESULT = 4,
+  STMT = 5,
+}
+
+function concatArr(as: Uint8Array[]): Uint8Array {
+  let len = 0;
+  for (const a of as) {
+    len += a.length;
+  }
+  const b = new Uint8Array(len);
+  let pos = 0;
+  for (const a of as) {
+    b.set(a, pos);
+    pos += a.length;
+  }
+  return b;
+}
+
+interface ReqInfo {
+  resolve: (x: Uint8Array) => void;
+}
+
+class Helper {
+  private reqCounter = 0;
+  private reqMap: Map<number, ReqInfo> = new Map();
+  private inChunks: Uint8Array[] = [];
+  private inSize: number = 0;
+  private expectSize: number = 0;
+  private enableTracing: boolean;
+  private isListening: boolean = false;
+  public proc: ChildProcessByStdio<stream.Writable, stream.Readable, null>;
+
+  constructor(opts?: { enableTracing: boolean }) {
+    this.enableTracing = opts?.enableTracing ?? false;
+    this.proc = child_process.spawn("taler-helper-sqlite3", {
+      stdio: ["pipe", "pipe", "inherit"],
+    });
+    // Make sure that the process is not blocking the parent process
+    // from exiting.
+    // When we are actively waiting for a response, we ref it again.
+    this.unrefProc();
+  }
+
+  private unrefProc() {
+    this.proc.unref();
+    try {
+      // @ts-ignore
+      this.proc.stdout.unref();
+    } catch (e) {
+      // Do nothing.
+    }
+  }
+
+  private refProc() {
+    this.proc.ref();
+    try {
+      // @ts-ignore
+      this.proc.stdout.ref();
+    } catch (e) {
+      // Do nothing.
+    }
+  }
+
+  startListening() {
+    if (this.isListening) {
+      console.error("Warning: Already listening");
+      return;
+    }
+    if (this.enableTracing) {
+      console.error("starting listening for data");
+    }
+    this.refProc();
+    this.proc.stdout.on("data", (chunk: Uint8Array) => {
+      if (this.enableTracing) {
+        console.error(`received chunk of size ${chunk.length} from helper`);
+      }
+      this.inChunks.push(chunk);
+      this.inSize += chunk.length;
+
+      while (true) {
+        if (this.expectSize === 0) {
+          if (this.inSize >= 4) {
+            const data = concatArr(this.inChunks);
+            const dv = new DataView(data.buffer);
+            const len = dv.getUint32(0);
+            this.expectSize = len;
+            continue;
+          }
+        }
+
+        if (this.expectSize > 0 && this.inSize >= this.expectSize) {
+          const data = concatArr(this.inChunks);
+          const packet = data.slice(0, this.expectSize);
+          const rest = data.slice(this.expectSize);
+          this.inSize = this.inSize - packet.length;
+          this.inChunks = [rest];
+          this.expectSize = 0;
+          this.processResponse(packet);
+          continue;
+        }
+
+        break;
+      }
+    });
+    this.isListening = true;
+  }
+
+  processResponse(packet: Uint8Array): void {
+    const dv = new DataView(packet.buffer);
+    const reqId = dv.getUint32(4);
+    if (this.enableTracing) {
+      console.error(
+        `processing complete response packet to ${reqId} from helper`,
+      );
+    }
+    const ri = this.reqMap.get(reqId);
+    if (!ri) {
+      console.error(`no request for response with ID ${reqId}`);
+      return;
+    }
+    this.reqMap.delete(reqId);
+    ri.resolve(packet.slice(8));
+  }
+
+  async communicate(cmd: number, payload: Uint8Array): Promise<Uint8Array> {
+    if (!this.isListening) {
+      this.startListening();
+    }
+
+    const prom = openPromise<Uint8Array>();
+    const reqNum = ++this.reqCounter;
+    this.reqMap.set(reqNum, {
+      resolve: prom.resolve,
+    });
+    // len, reqId, reqType, payload
+    const bufLen = 4 + 4 + 1 + payload.length;
+    const buf = new Uint8Array(bufLen);
+    const dv = new DataView(buf.buffer);
+    dv.setUint32(0, bufLen);
+    dv.setUint32(4, reqNum);
+    dv.setUint8(8, cmd);
+    buf.set(payload, 9);
+
+    await new Promise<void>((resolve, reject) => {
+      if (this.enableTracing) {
+        console.error(`writing to helper stdin for request ${reqNum}`);
+      }
+      this.proc.stdin.write(buf, (err) => {
+        if (this.enableTracing) {
+          console.error(`done writing to helper stdin for request ${reqNum}`);
+        }
+        if (err) {
+          reject(err);
+          return;
+        }
+        resolve();
+      });
+    });
+    const resp = await prom.promise;
+    if (this.enableTracing) {
+      console.error(
+        `request to ${reqNum} got result, reqMap keys ${[
+          ...this.reqMap.keys(),
+        ]}`,
+      );
+    }
+    if (this.reqMap.size === 0) {
+      this.isListening = false;
+      this.proc.stdout.removeAllListeners();
+      this.unrefProc();
+    }
+    return resp;
+  }
+}
+
+enum TypeTag {
+  NULL = 1,
+  INT = 2,
+  REAL = 3,
+  TEXT = 4,
+  BLOB = 5,
+}
+
+function encodeParams(wr: Writer, params: BindParams | undefined): void {
+  const keys = Object.keys(params ?? {});
+  wr.writeUint16(keys.length);
+  for (const key of keys) {
+    wr.writeString(key);
+    const val = params![key];
+    if (typeof val === "number" || typeof val === "bigint") {
+      wr.writeUint8(TypeTag.INT);
+      wr.writeInt64(BigInt(val));
+    } else if (val == null) {
+      wr.writeUint8(TypeTag.NULL);
+    } else if (typeof val === "string") {
+      wr.writeUint8(TypeTag.TEXT);
+      wr.writeString(val);
+    } else if (ArrayBuffer.isView(val)) {
+      wr.writeUint8(TypeTag.BLOB);
+      wr.writeUint32(val.length);
+      wr.writeRawBytes(val);
+    } else {
+      throw Error("unsupported type for bind params");
+    }
+  }
+}
+
+function decodeRowList(rd: Reader): ResultRow[] {
+  const rows: ResultRow[] = [];
+  const numRows = rd.readUint16();
+  const numCols = rd.readUint16();
+  const colNames: string[] = [];
+  for (let i = 0; i < numCols; i++) {
+    colNames.push(rd.readString());
+  }
+  for (let i = 0; i < numRows; i++) {
+    const row: ResultRow = {};
+    for (let j = 0; j < numCols; j++) {
+      const valTag = rd.readUint8();
+      if (valTag === TypeTag.NULL) {
+        row[colNames[j]] = null;
+      } else if (valTag == TypeTag.TEXT) {
+        row[colNames[j]] = rd.readString();
+      } else if (valTag == TypeTag.BLOB) {
+        row[colNames[j]] = rd.readBytes();
+      } else if (valTag == TypeTag.INT) {
+        let val: number | bigint = rd.readInt64();
+        if (val <= Number.MAX_SAFE_INTEGER && val >= Number.MIN_SAFE_INTEGER) {
+          val = Number(val);
+        }
+        row[colNames[j]] = val;
+      }
+    }
+    rows.push(row);
+  }
+  return rows;
+}
+
+class Reader {
+  public pos = 0;
+  private dv: DataView;
+  private td = new TextDecoder();
+  constructor(private buf: Uint8Array) {
+    this.dv = new DataView(buf.buffer);
+  }
+  readUint16(): number {
+    const res = this.dv.getUint16(this.pos);
+    this.pos += 2;
+    return res;
+  }
+  readInt64(): bigint {
+    const res = this.dv.getBigInt64(this.pos);
+    this.pos += 8;
+    return res;
+  }
+  readUint8(): number {
+    const res = this.dv.getUint8(this.pos);
+    this.pos += 1;
+    return res;
+  }
+  readString(): string {
+    const len = this.dv.getUint32(this.pos);
+    const strBuf = this.buf.slice(this.pos + 4, this.pos + 4 + len);
+    this.pos += 4 + len;
+    return this.td.decode(strBuf);
+  }
+  readBytes(): Uint8Array {
+    const len = this.dv.getUint32(this.pos);
+    const rBuf = this.buf.slice(this.pos + 4, this.pos + 4 + len);
+    this.pos += 4 + len;
+    return rBuf;
+  }
+}
+
+class Writer {
+  private chunks: Uint8Array[] = [];
+
+  private te = new TextEncoder();
+
+  /**
+   * Write raw bytes without any length-prefix.
+   */
+  writeRawBytes(b: Uint8Array): void {
+    this.chunks.push(b);
+  }
+
+  /**
+   * Write length-prefixed string.
+   */
+  writeString(s: string) {
+    const bufStr = this.te.encode(s);
+    this.writeUint32(bufStr.length);
+    this.chunks.push(bufStr);
+  }
+
+  writeUint8(n: number): void {
+    const buf = new Uint8Array(1);
+    const dv = new DataView(buf.buffer);
+    dv.setUint8(0, n);
+    this.chunks.push(buf);
+  }
+
+  writeUint16(n: number): void {
+    const buf = new Uint8Array(2);
+    const dv = new DataView(buf.buffer);
+    dv.setUint16(0, n);
+    this.chunks.push(buf);
+  }
+
+  writeUint32(n: number): void {
+    const buf = new Uint8Array(4);
+    const dv = new DataView(buf.buffer);
+    dv.setUint32(0, n);
+    this.chunks.push(buf);
+  }
+
+  writeInt64(n: bigint): void {
+    const buf = new Uint8Array(8);
+    const dv = new DataView(buf.buffer);
+    dv.setBigInt64(0, n);
+    this.chunks.push(buf);
+  }
+
+  reap(): Uint8Array {
+    return concatArr(this.chunks);
+  }
+}
+
+class Sqlite3Error extends Error {
+  // Name of "code" is to be compatible with better-sqlite3.
+  constructor(
+    message: string,
+    public code: string,
+  ) {
+    super(message);
+  }
+}
+
+function throwForFailure(rd: Reader): never {
+  const msg = rd.readString();
+  // Numeric error code
+  rd.readUint16();
+  const errName = rd.readString();
+  throw new Sqlite3Error(msg, errName);
+}
+
+function expectCommunicateSuccess(commRes: Uint8Array): void {
+  const rd = new Reader(commRes);
+  const respType = rd.readUint8();
+  if (respType == HelperResp.OK) {
+    // Good
+  } else if (respType == HelperResp.FAIL) {
+    throwForFailure(rd);
+  } else {
+    throw Error("unexpected response tag");
+  }
+}
+
+export async function createNodeHelperSqlite3Impl(
+  opts: { enableTracing?: boolean } = {},
+): Promise<Sqlite3Interface> {
+  const enableTracing = opts.enableTracing ?? false;
+  const helper = new Helper({ enableTracing });
+  const resp = await helper.communicate(HelperCmd.HELLO, new Uint8Array());
+
+  let counterDb = 1;
+  let counterPrep = 1;
+
+  return {
+    async open(filename: string): Promise<Sqlite3Database> {
+      if (enableTracing) {
+        console.error(`opening database ${filename}`);
+      }
+      const myDbId = counterDb++;
+      {
+        const wr = new Writer();
+        wr.writeUint16(myDbId);
+        wr.writeString(filename);
+        const payload = wr.reap();
+        const commRes = await helper.communicate(HelperCmd.OPEN, payload);
+        expectCommunicateSuccess(commRes);
+      }
+      if (enableTracing) {
+        console.error(`opened database ${filename}`);
+      }
+      return {
+        internalDbHandle: undefined,
+        async close() {
+          if (enableTracing) {
+            console.error(`closing database`);
+          }
+          const wr = new Writer();
+          wr.writeUint16(myDbId);
+          const payload = wr.reap();
+          const commRes = await helper.communicate(HelperCmd.CLOSE, payload);
+          expectCommunicateSuccess(commRes);
+        },
+        async prepare(stmtStr): Promise<Sqlite3Statement> {
+          const myPrepId = counterPrep++;
+          if (enableTracing) {
+            console.error(`preparing statement ${myPrepId}`);
+          }
+          {
+            const wr = new Writer();
+            wr.writeUint16(myDbId);
+            wr.writeUint16(myPrepId);
+            wr.writeString(stmtStr);
+            const payload = wr.reap();
+            const commRes = await helper.communicate(
+              HelperCmd.PREPARE,
+              payload,
+            );
+            expectCommunicateSuccess(commRes);
+          }
+          if (enableTracing) {
+            console.error(`prepared statement ${myPrepId}`);
+          }
+          return {
+            internalStatement: undefined,
+            async getAll(params?: BindParams): Promise<ResultRow[]> {
+              if (enableTracing) {
+                console.error(`running getAll`);
+              }
+              const wr = new Writer();
+              wr.writeUint16(myPrepId);
+              encodeParams(wr, params);
+              const payload = wr.reap();
+              const commRes = await helper.communicate(
+                HelperCmd.STMT_GET_ALL,
+                payload,
+              );
+              const rd = new Reader(commRes);
+              const respType = rd.readUint8();
+              if (respType === HelperResp.ROWLIST) {
+                const rows = decodeRowList(rd);
+                return rows;
+              } else if (respType === HelperResp.FAIL) {
+                throwForFailure(rd);
+              } else {
+                throw Error("unexpected result for getAll");
+              }
+            },
+            async getFirst(
+              params?: BindParams,
+            ): Promise<ResultRow | undefined> {
+              if (enableTracing) {
+                console.error(`running getFirst`);
+              }
+              const wr = new Writer();
+              wr.writeUint16(myPrepId);
+              encodeParams(wr, params);
+              const payload = wr.reap();
+              const commRes = await helper.communicate(
+                HelperCmd.STMT_GET_FIRST,
+                payload,
+              );
+              const rd = new Reader(commRes);
+              const respType = rd.readUint8();
+              if (respType === HelperResp.ROWLIST) {
+                const rows = decodeRowList(rd);
+                return rows[0];
+              } else if (respType === HelperResp.FAIL) {
+                throwForFailure(rd);
+              } else {
+                throw Error("unexpected result for getAll");
+              }
+            },
+            async run(params?: BindParams): Promise<RunResult> {
+              if (enableTracing) {
+                console.error(`running run`);
+              }
+              const wr = new Writer();
+              wr.writeUint16(myPrepId);
+              encodeParams(wr, params);
+              const payload = wr.reap();
+              const commRes = await helper.communicate(
+                HelperCmd.STMT_RUN,
+                payload,
+              );
+              if (enableTracing) {
+                console.error(`run got response`);
+              }
+              const rd = new Reader(commRes);
+              const respType = rd.readUint8();
+              if (respType === HelperResp.OK) {
+                if (enableTracing) {
+                  console.error(`run success (OK)`);
+                }
+                return {
+                  lastInsertRowid: 0,
+                };
+              } else if (respType === HelperResp.RUNRESULT) {
+                if (enableTracing) {
+                  console.error(`run success (RUNRESULT)`);
+                }
+                const lastInsertRowid = rd.readInt64();
+                return {
+                  lastInsertRowid,
+                };
+              } else if (respType === HelperResp.FAIL) {
+                if (enableTracing) {
+                  console.error(`run error (FAIL)`);
+                }
+                throwForFailure(rd);
+              } else {
+                throw Error("SQL run failed");
+              }
+            },
+          };
+        },
+        async exec(sqlStr: string): Promise<void> {
+          {
+            if (enableTracing) {
+              console.error(`running execute`);
+            }
+            const wr = new Writer();
+            wr.writeUint16(myDbId);
+            wr.writeString(sqlStr);
+            const payload = wr.reap();
+            const execRes = await helper.communicate(HelperCmd.EXEC, payload);
+            expectCommunicateSuccess(execRes);
+          }
+        },
+      };
+    },
+  };
+}
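
The protocol between this client and the helper process is a simple length-prefixed, big-endian binary framing over stdin/stdout: a request is [u32 total length][u32 request id][u8 command][payload], and the response echoes the request id as [u32 total length][u32 request id][u8 response tag][body]. A small framing sketch with the same layout as communicate() above (frameRequest is a hypothetical helper, not part of this commit):

  // Frame one request for the helper process: [len][reqId][cmd][payload].
  function frameRequest(reqId: number, cmd: number, payload: Uint8Array): Uint8Array {
    const buf = new Uint8Array(4 + 4 + 1 + payload.length);
    const dv = new DataView(buf.buffer);
    dv.setUint32(0, buf.length); // total packet length, header included (big-endian)
    dv.setUint32(4, reqId);      // correlates the eventual response with this request
    dv.setUint8(8, cmd);         // e.g. HelperCmd.EXEC
    buf.set(payload, 9);
    return buf;
  }
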
diff --git a/packages/idb-bridge/src/node-sqlite3-impl.ts b/packages/idb-bridge/src/node-sqlite3-impl.ts
deleted file mode 100644
index fa38d298f..000000000
--- a/packages/idb-bridge/src/node-sqlite3-impl.ts
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- This file is part of GNU Taler
- (C) 2023 Taler Systems S.A.
-
- GNU Taler is free software; you can redistribute it and/or modify it under the
- terms of the GNU General Public License as published by the Free Software
- Foundation; either version 3, or (at your option) any later version.
-
- GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
- WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
- A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License along with
- GNU Taler; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
- */
-
-// @ts-ignore: optional dependency
-import type Database from "better-sqlite3";
-import {
-  ResultRow,
-  Sqlite3Interface,
-  Sqlite3Statement,
-} from "./sqlite3-interface.js";
-
-export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
-  // @ts-ignore: optional dependency
-  const bsq = (await import("better-sqlite3")).default;
-
-  return {
-    open(filename: string) {
-      const internalDbHandle = bsq(filename);
-      return {
-        internalDbHandle,
-        close() {
-          internalDbHandle.close();
-        },
-        prepare(stmtStr): Sqlite3Statement {
-          const stmtHandle = internalDbHandle.prepare(stmtStr);
-          return {
-            internalStatement: stmtHandle,
-            getAll(params): ResultRow[] {
-              let res: ResultRow[];
-              if (params === undefined) {
-                res = stmtHandle.all() as ResultRow[];
-              } else {
-                res = stmtHandle.all(params) as ResultRow[];
-              }
-              return res;
-            },
-            getFirst(params): ResultRow | undefined {
-              let res: ResultRow | undefined;
-              if (params === undefined) {
-                res = stmtHandle.get() as ResultRow | undefined;
-              } else {
-                res = stmtHandle.get(params) as ResultRow | undefined;
-              }
-              return res;
-            },
-            run(params) {
-              const myParams = [];
-              if (params !== undefined) {
-                myParams.push(params);
-              }
-              // The better-sqlite3 library doesn't like it we pass
-              // undefined directly.
-              let res: Database.RunResult;
-              if (params !== undefined) {
-                res = stmtHandle.run(params);
-              } else {
-                res = stmtHandle.run();
-              }
-              return {
-                lastInsertRowid: res.lastInsertRowid,
-              };
-            },
-          };
-        },
-        exec(sqlStr): void {
-          internalDbHandle.exec(sqlStr);
-        },
-      };
-    },
-  };
-}
diff --git a/packages/idb-bridge/src/sqlite3-interface.ts b/packages/idb-bridge/src/sqlite3-interface.ts
index 8668ef844..434021420 100644
--- a/packages/idb-bridge/src/sqlite3-interface.ts
+++ b/packages/idb-bridge/src/sqlite3-interface.ts
@@ -1,15 +1,15 @@
 export type Sqlite3Database = {
   internalDbHandle: any;
-  exec(sqlStr: string): void;
-  prepare(stmtStr: string): Sqlite3Statement;
-  close(): void;
+  exec(sqlStr: string): Promise<void>;
+  prepare(stmtStr: string): Promise<Sqlite3Statement>;
+  close(): Promise<void>;
 };
 export type Sqlite3Statement = {
   internalStatement: any;
 
-  run(params?: BindParams): RunResult;
-  getAll(params?: BindParams): ResultRow[];
-  getFirst(params?: BindParams): ResultRow | undefined;
+  run(params?: BindParams): Promise<RunResult>;
+  getAll(params?: BindParams): Promise<ResultRow[]>;
+  getFirst(params?: BindParams): Promise<ResultRow | undefined>;
 };
 
 export interface RunResult {
@@ -30,5 +30,5 @@ export type ResultRow = Record<string, Sqlite3Value>;
  * to be used by our IndexedDB sqlite3 backend.
  */
 export interface Sqlite3Interface {
-  open(filename: string): Sqlite3Database;
+  open(filename: string): Promise<Sqlite3Database>;
 }
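
Since every method of Sqlite3Database and Sqlite3Statement now returns a promise, callers await each step. A minimal usage sketch against this interface (the kv table and its columns are made up for illustration):

  import { Sqlite3Interface } from "@gnu-taler/idb-bridge";

  async function demo(impl: Sqlite3Interface): Promise<void> {
    const db = await impl.open(":memory:");
    await db.exec("CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v TEXT);");
    const put = await db.prepare("INSERT INTO kv (k, v) VALUES ($k, $v)");
    await put.run({ k: "hello", v: "world" });
    const get = await db.prepare("SELECT v FROM kv WHERE k = $k");
    const row = await get.getFirst({ k: "hello" });
    console.log(row?.v); // "world"
    await db.close();
  }
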
diff --git a/packages/idb-bridge/src/testingdb.ts b/packages/idb-bridge/src/testingdb.ts
index 6c13979ca..9f80cae74 100644
--- a/packages/idb-bridge/src/testingdb.ts
+++ b/packages/idb-bridge/src/testingdb.ts
@@ -16,7 +16,7 @@
 import { createSqliteBackend } from "./SqliteBackend.js";
 import { BridgeIDBFactory } from "./bridge-idb.js";
 import { IDBFactory } from "./idbtypes.js";
-import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+import { createNodeHelperSqlite3Impl } from "./node-helper-sqlite3-impl.js";
 
 let idbFactory: IDBFactory | undefined = undefined;
 
@@ -24,7 +24,11 @@ export async function initTestIndexedDB(): Promise<void> {
   // const backend = new MemoryBackend();
   // backend.enableTracing = true;
 
-  const sqlite3Impl = await createNodeSqlite3Impl();
+  const sqlite3Impl = await createNodeHelperSqlite3Impl({
+    enableTracing: false,
+  });
+
+  // const sqlite3Impl = await createNodeBetterSqlite3Impl();
 
   const backend = await createSqliteBackend(sqlite3Impl, {
     filename: ":memory:",
diff --git a/packages/idb-bridge/taler-helper-sqlite3 b/packages/idb-bridge/taler-helper-sqlite3
new file mode 100755
index 000000000..02a936220
--- /dev/null
+++ b/packages/idb-bridge/taler-helper-sqlite3
@@ -0,0 +1,305 @@
+#!/usr/bin/env python
+
+import sqlite3
+import sys
+import os
+
+print("started sqlite3 helper at", os.getcwd(), file=sys.stderr)
+
+enable_tracing = False
+def trace(*args):
+    print("HELPER", *args, file=sys.stderr)
+
+CMD_HELLO = 1
+CMD_SHUTDOWN = 2
+CMD_OPEN = 3
+CMD_CLOSE = 4
+CMD_PREPARE = 5
+CMD_STMT_GET_ALL = 6
+CMD_STMT_GET_FIRST = 7
+CMD_STMT_RUN = 8
+CMD_EXEC = 9
+
+RESP_OK = 1
+RESP_FAIL = 2
+RESP_ROWLIST = 3
+RESP_RUNRESULT = 4
+RESP_STMT = 5
+
+
+TAG_NULL = 1
+TAG_INT = 2
+TAG_REAL = 3
+TAG_TEXT = 4
+TAG_BLOB = 5
+
+cmdstream = open(0, "rb")
+respstream = open(1, "wb")
+
+db_handles = dict()
+
+# Since python's sqlite3 library does not support prepared statements,
+# we fake it by just storing the string of the statement.
+# Internally, the sqlite3 library does its own caching of
+# prepared statements.
+prep_handles = dict()
+
+
+def write_resp(req_id, cmd, payload=None):
+    if enable_tracing:
+        trace("sending response to request", req_id)
+    outlen = 4 + 4 + 1 + (0 if payload is None else len(payload))
+    respstream.write(outlen.to_bytes(4))
+    respstream.write(req_id.to_bytes(4))
+    respstream.write(cmd.to_bytes(1))
+    if payload is not None:
+        respstream.write(payload)
+    respstream.flush()
+
+
+dbconn = None
+
+
+class PacketWriter:
+    def __init__(self):
+        self.chunks = []
+
+    def write_string(self, s):
+        buf = s.encode("utf-8")
+        self.write_uint32(len(buf))
+        self.write_raw_bytes(buf)
+
+    def write_bytes(self, buf):
+        self.write_uint32(len(buf))
+        self.write_raw_bytes(buf)
+
+    def write_raw_bytes(self, buf):
+        self.chunks.append(buf)
+
+    def write_uint8(self, n):
+        self.chunks.append(n.to_bytes(1))
+
+    def write_uint32(self, n):
+        self.chunks.append(n.to_bytes(4))
+
+    def write_uint16(self, n):
+        self.chunks.append(n.to_bytes(2))
+
+    def write_int64(self, n):
+        self.chunks.append(n.to_bytes(8, signed=True))
+
+    def write_rowlist(self, rows, description):
+        self.write_uint16(len(rows))
+        self.write_uint16(len(description))
+        for desc in description:
+            col_name = desc[0]
+            self.write_string(col_name)
+
+        if len(description) == 0 or len(rows) == 0:
+            return
+
+        for row in rows:
+            if len(row) != len(description):
+                raise Error("invariant violated")
+            for val in row:
+                if val is None:
+                    self.write_uint8(TAG_NULL)
+                elif isinstance(val, str):
+                    self.write_uint8(TAG_TEXT)
+                    self.write_string(val)
+                elif isinstance(val, bytes):
+                    self.write_uint8(TAG_BLOB)
+                    self.write_bytes(val)
+                elif isinstance(val, int):
+                    self.write_uint8(TAG_INT)
+                    self.write_int64(val)
+                else:
+                    raise Error("unknown col type")
+
+    def reap(self):
+        return b"".join(self.chunks)
+
+
+class PacketReader:
+    def __init__(self, data):
+        self.data = data
+        self.pos = 0
+
+    def read_string(self):
+        l = self.read_uint32()
+        d = self.data[self.pos : self.pos + l]
+        self.pos += l
+        return d.decode("utf-8")
+
+    def read_blob(self):
+        l = self.read_uint32()
+        d = self.data[self.pos : self.pos + l]
+        self.pos += l
+        return d
+
+    def read_uint16(self):
+        d = int.from_bytes(self.data[self.pos : self.pos + 2])
+        self.pos += 2
+        return d
+
+    def read_uint32(self):
+        d = int.from_bytes(self.data[self.pos : self.pos + 4])
+        self.pos += 4
+        return d
+
+    def read_int64(self):
+        d = int.from_bytes(self.data[self.pos : self.pos + 8], signed=True)
+        self.pos += 8
+        return d
+
+    def read_uint8(self):
+        d = self.data[self.pos]
+        self.pos += 1
+        return d
+
+    def read_params(self):
+        num_args = pr.read_uint16()
+        params = dict()
+        for x in range(num_args):
+            name = pr.read_string()
+            tag = pr.read_uint8()
+            if tag == TAG_NULL:
+                params[name] = None
+                continue
+            if tag == TAG_INT:
+                params[name] = pr.read_int64()
+                continue
+            if tag == TAG_TEXT:
+                params[name] = pr.read_string()
+                continue
+            if tag == TAG_BLOB:
+                params[name] = pr.read_blob()
+                continue
+            raise Error("tag not understood")
+        return params
+
+def read_exactly(n):
+    buf = cmdstream.read(n)
+    if len(buf) != n:
+        raise Error("incomplete message") 
+    return buf
+
+def handle_query_failure(req_id, e):
+    pw = PacketWriter()
+    pw.write_string(str(e))
+    pw.write_uint16(e.sqlite_errorcode)
+    pw.write_string(e.sqlite_errorname)
+    write_resp(req_id, RESP_FAIL, pw.reap())
+
+while True:
+    if enable_tracing:
+        trace("reading command")
+    buf_sz = cmdstream.read(4)
+    if len(buf_sz) == 0:
+        trace("end of input reached")
+        sys.exit(0)
+    elif len(buf_sz) != 4:
+        raise Error("incomplete message")
+    size = int.from_bytes(buf_sz)
+    req_id = int.from_bytes(read_exactly(4))
+    rest = read_exactly(size - 8)
+    pr = PacketReader(rest)
+    cmd = pr.read_uint8()
+    if enable_tracing:
+        trace("received command:", cmd, "request_id:", req_id)
+
+    if cmd == CMD_HELLO:
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_OPEN:
+        # open
+        if dbconn is not None:
+            raise Error("DB already connected")
+        db_handle = pr.read_uint16()
+        filename = pr.read_string()
+        dbconn = sqlite3.connect(filename, autocommit=True, isolation_level=None)
+        # Make sure we are not in a transaction
+        dbconn.commit()
+        db_handles[db_handle] = dbconn
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_CLOSE:
+        # close
+        dbconn.close()
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_PREPARE:
+        db_id = pr.read_uint16()
+        prep_id = pr.read_uint16()
+        sql = pr.read_string()
+        prep_handles[prep_id] = (dbconn, sql)
+        write_resp(req_id, RESP_OK)
+        continue
+    if cmd == CMD_STMT_GET_ALL:
+        prep_id = pr.read_uint16()
+        params = pr.read_params()
+        dbconn, stmt = prep_handles[prep_id]
+        cursor = dbconn.cursor()
+        try:
+            res = cursor.execute(stmt, params)
+            rows = cursor.fetchall()
+        except sqlite3.Error as e:
+            handle_query_failure(req_id, e)
+            continue
+        pw = PacketWriter()
+        pw.write_rowlist(rows, cursor.description)
+        write_resp(req_id, RESP_ROWLIST, pw.reap())
+        continue
+    if cmd == CMD_STMT_GET_FIRST:
+        prep_id = pr.read_uint16()
+        params = pr.read_params()
+        dbconn, stmt = prep_handles[prep_id]
+        cursor = dbconn.cursor()
+        try:
+            res = cursor.execute(stmt, params)
+            row = cursor.fetchone()
+        except sqlite3.Error as e:
+            handle_query_failure(req_id, e)
+            continue
+        pw = PacketWriter()
+        rows = [row] if row is not None else []
+        pw.write_rowlist(rows, cursor.description)
+        write_resp(req_id, RESP_ROWLIST, pw.reap())
+        continue
+    if cmd == CMD_STMT_RUN:
+        if enable_tracing:
+            trace("running statement")
+        prep_id = pr.read_uint16()
+        params = pr.read_params()
+        dbconn, stmt = prep_handles[prep_id]
+        cursor = dbconn.cursor()
+        try:
+            res = cursor.execute(stmt, params)
+        except sqlite3.Error as e:
+            if enable_tracing:
+                trace("got sqlite error")
+            handle_query_failure(req_id, e)
+            continue
+        if enable_tracing:
+            trace("running query succeeded")
+        if cursor.lastrowid is None:
+            write_resp(req_id, RESP_OK)
+        else:
+            pw = PacketWriter()
+            pw.write_int64(cursor.lastrowid)
+            payload = pw.reap()
+            write_resp(req_id, RESP_RUNRESULT, payload)
+        continue
+    if cmd == CMD_EXEC:
+        db_id = pr.read_uint16()
+        sql = pr.read_string()
+        dbconn = db_handles[db_id]
+        try:
+            dbconn.executescript(sql)
+        except sqlite3.Error as e:
+            handle_query_failure(req_id, e)
+            continue
+        write_resp(req_id, RESP_OK)
+        continue
+
+    print("unknown command", file=sys.stderr)
diff --git a/packages/taler-wallet-cli/Makefile b/packages/taler-wallet-cli/Makefile
index c8529c768..286988ab1 100644
--- a/packages/taler-wallet-cli/Makefile
+++ b/packages/taler-wallet-cli/Makefile
@@ -35,8 +35,7 @@ install-nodeps:
        install ./dist/taler-wallet-cli-bundled.cjs $(DESTDIR)$(NODEDIR)/dist/
        install ./dist/taler-wallet-cli-bundled.cjs.map $(DESTDIR)$(NODEDIR)/dist/
        install ./bin/taler-wallet-cli.mjs $(DESTDIR)$(NODEDIR)/bin/
-       install ../idb-bridge/node_modules/better-sqlite3/build/Release/better_sqlite3.node $(DESTDIR)$(LIBDIR)/build/ \
-               || echo "sqlite3 unavailable, better-sqlite3 native module not found"
+       install ../idb-bridge/taler-helper-sqlite3 $(DESTDIR)$(BINDIR)/taler-helper-sqlite3
        ln -sf ../lib/taler-wallet-cli/node_modules/taler-wallet-cli/bin/taler-wallet-cli.mjs $(DESTDIR)$(BINDIR)/taler-wallet-cli
 deps:
        pnpm install --frozen-lockfile --filter @gnu-taler/taler-wallet-cli...
diff --git a/packages/taler-wallet-cli/build-qtart.mjs b/packages/taler-wallet-cli/build-qtart.mjs
index cd2ff98e2..51bfc3efe 100755
--- a/packages/taler-wallet-cli/build-qtart.mjs
+++ b/packages/taler-wallet-cli/build-qtart.mjs
@@ -62,7 +62,7 @@ export const buildConfig = {
   conditions: ["qtart"],
   sourcemap: true,
   // quickjs standard library
-  external: ["std", "os", "better-sqlite3"],
+  external: ["std", "os", "node:child_process"],
   define: {
     __VERSION__: `"${_package.version}"`,
     __GIT_HASH__: `"${GIT_HASH}"`,
diff --git a/packages/taler-wallet-core/src/host-impl.node.ts b/packages/taler-wallet-core/src/host-impl.node.ts
index 9eddd151e..2b2639895 100644
--- a/packages/taler-wallet-core/src/host-impl.node.ts
+++ b/packages/taler-wallet-core/src/host-impl.node.ts
@@ -29,7 +29,9 @@ import {
   createSqliteBackend,
   shimIndexedDB,
 } from "@gnu-taler/idb-bridge";
-import { createNodeSqlite3Impl } from "@gnu-taler/idb-bridge/node-sqlite3-bindings";
+import {
+  createNodeHelperSqlite3Impl,
+} from "@gnu-taler/idb-bridge/node-helper-sqlite3-impl";
 import {
   Logger,
   SetTimeoutTimerAPI,
@@ -113,7 +115,7 @@ async function makeSqliteDb(
   } else {
     BridgeIDBFactory.enableTracing = false;
   }
-  const imp = await createNodeSqlite3Impl();
+  const imp = await createNodeHelperSqlite3Impl();
   const dbFilename = getSqlite3FilenameFromStoragePath(
     args.persistentStoragePath,
   );
diff --git a/packages/taler-wallet-core/src/host-impl.qtart.ts b/packages/taler-wallet-core/src/host-impl.qtart.ts
index 40210f0eb..5cf7af61a 100644
--- a/packages/taler-wallet-core/src/host-impl.qtart.ts
+++ b/packages/taler-wallet-core/src/host-impl.qtart.ts
@@ -66,32 +66,32 @@ export async function createQtartSqlite3Impl(): Promise<Sqlite3Interface> {
     throw Error("globalThis._qtart not defined");
   }
   return {
-    open(filename: string) {
+    async open(filename: string) {
       const internalDbHandle = tart.sqlite3Open(filename);
       return {
         internalDbHandle,
-        close() {
+        async close() {
           tart.sqlite3Close(internalDbHandle);
         },
-        prepare(stmtStr): Sqlite3Statement {
+        async prepare(stmtStr): Promise<Sqlite3Statement> {
           const stmtHandle = tart.sqlite3Prepare(internalDbHandle, stmtStr);
           return {
             internalStatement: stmtHandle,
-            getAll(params): ResultRow[] {
+            async getAll(params): Promise<ResultRow[]> {
               numStmt++;
               return tart.sqlite3StmtGetAll(stmtHandle, params);
             },
-            getFirst(params): ResultRow | undefined {
+            async getFirst(params): Promise<ResultRow | undefined> {
               numStmt++;
               return tart.sqlite3StmtGetFirst(stmtHandle, params);
             },
-            run(params) {
+            async run(params) {
               numStmt++;
               return tart.sqlite3StmtRun(stmtHandle, params);
             },
           };
         },
-        exec(sqlStr): void {
+        async exec(sqlStr): Promise<void> {
           numStmt++;
           tart.sqlite3Exec(internalDbHandle, sqlStr);
         },
diff --git a/packages/taler-wallet-embedded/build.mjs b/packages/taler-wallet-embedded/build.mjs
index 09b47ee96..60a9603e5 100755
--- a/packages/taler-wallet-embedded/build.mjs
+++ b/packages/taler-wallet-embedded/build.mjs
@@ -56,7 +56,7 @@ export const buildConfig = {
   bundle: true,
   minify: false,
   target: ["es2020"],
-  external: ["os", "std", "better-sqlite3"],
+  external: ["os", "std", "node:child_process", "node:fs"],
   format: "esm",
   platform: "neutral",
   mainFields: ["module", "main"],
diff --git a/packages/web-util/build.mjs b/packages/web-util/build.mjs
index 7c0e4400f..28dcc920c 100755
--- a/packages/web-util/build.mjs
+++ b/packages/web-util/build.mjs
@@ -126,7 +126,7 @@ const buildConfigBuild = {
   },
   format: "esm",
   platform: "node",
-  external: ["esbuild"],
+  external: ["esbuild", "node:child_process"],
   // https://github.com/evanw/esbuild/issues/1921
   // How to fix "Dynamic require of "os" is not supported"
   // esbuild cannot convert external "static" commonjs require statements to static esm imports
@@ -152,7 +152,7 @@ const buildConfigTesting = {
   },
   format: "esm",
   platform: "browser",
-  external: ["preact", "@gnu-taler/taler-util", "jed", "swr", "axios"],
+  external: ["preact", "@gnu-taler/taler-util", "jed", "swr", "axios", 
"node:child_process"],
   jsxFactory: "h",
   jsxFragment: "Fragment",
 };
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 0b03cbbdc..5e5c95948 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -510,10 +510,6 @@ importers:
       tslib:
         specifier: ^2.6.2
         version: 2.6.2
-    optionalDependencies:
-      better-sqlite3:
-        specifier: 10.0.0
-        version: 10.0.0
     devDependencies:
       '@types/better-sqlite3':
         specifier: ^7.6.8
@@ -1558,6 +1554,7 @@ packages:
   '@babel/plugin-proposal-async-generator-functions@7.19.1':
     resolution: {integrity: sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-async-generator-functions instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
@@ -1571,6 +1568,7 @@ packages:
   '@babel/plugin-proposal-class-static-block@7.18.6':
     resolution: {integrity: sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-class-static-block instead.
     peerDependencies:
       '@babel/core': ^7.12.0
 
@@ -1583,6 +1581,7 @@ packages:
   '@babel/plugin-proposal-dynamic-import@7.18.6':
     resolution: {integrity: sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-dynamic-import instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
@@ -1596,24 +1595,28 @@ packages:
   '@babel/plugin-proposal-json-strings@7.18.6':
     resolution: {integrity: sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-json-strings instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
   '@babel/plugin-proposal-logical-assignment-operators@7.18.9':
     resolution: {integrity: sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-logical-assignment-operators instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
   '@babel/plugin-proposal-nullish-coalescing-operator@7.18.6':
     resolution: {integrity: sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-nullish-coalescing-operator instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
   '@babel/plugin-proposal-numeric-separator@7.18.6':
     resolution: {integrity: 
sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and 
thus this plugin is no longer maintained. Please use 
@babel/plugin-transform-numeric-separator instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
@@ -1627,18 +1630,21 @@ packages:
   '@babel/plugin-proposal-optional-catch-binding@7.18.6':
     resolution: {integrity: 
sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and 
thus this plugin is no longer maintained. Please use 
@babel/plugin-transform-optional-catch-binding instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
   '@babel/plugin-proposal-optional-chaining@7.18.9':
     resolution: {integrity: 
sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and 
thus this plugin is no longer maintained. Please use 
@babel/plugin-transform-optional-chaining instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
   '@babel/plugin-proposal-private-methods@7.18.6':
     resolution: {integrity: 
sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA==}
     engines: {node: '>=6.9.0'}
+    deprecated: This proposal has been merged to the ECMAScript standard and 
thus this plugin is no longer maintained. Please use 
@babel/plugin-transform-private-methods instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
@@ -1658,6 +1664,7 @@ packages:
   '@babel/plugin-proposal-unicode-property-regex@7.18.6':
     resolution: {integrity: 
sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w==}
     engines: {node: '>=4'}
+    deprecated: This proposal has been merged to the ECMAScript standard and 
thus this plugin is no longer maintained. Please use 
@babel/plugin-transform-unicode-property-regex instead.
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
@@ -3804,6 +3811,7 @@ packages:
 
   babel-merge@3.0.0:
     resolution: {integrity: 
sha512-eBOBtHnzt9xvnjpYNI5HmaPp/b2vMveE5XggzqHnQeHJ8mFIBrBv6WZEVIj5jJ2uwTItkqKo9gWzEEcBxEq0yw==}
+    deprecated: Package no longer supported. Contact Support at 
https://www.npmjs.com/support for more info.
     peerDependencies:
       '@babel/core': ^7.0.0
 
@@ -3866,9 +3874,6 @@ packages:
   bcrypt-pbkdf@1.0.2:
     resolution: {integrity: 
sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==}
 
-  better-sqlite3@10.0.0:
-    resolution: {integrity: 
sha512-rOz0JY8bt9oMgrFssP7GnvA5R3yln73y/NizzWqy3WlFth8Ux8+g4r/N9fjX97nn4X1YX6MTER2doNpTu5pqiA==}
-
   big-integer@1.6.52:
     resolution: {integrity: 
sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==}
     engines: {node: '>=0.6'}
@@ -4844,10 +4849,6 @@ packages:
     resolution: {integrity: 
sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==}
     engines: {node: '>=8'}
 
-  detect-libc@2.0.3:
-    resolution: {integrity: 
sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==}
-    engines: {node: '>=8'}
-
   detect-node@2.1.0:
     resolution: {integrity: 
sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==}
 
@@ -5278,16 +5279,19 @@ packages:
   eslint@7.32.0:
     resolution: {integrity: 
sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==}
     engines: {node: ^10.12.0 || >=12.0.0}
+    deprecated: This version is no longer supported. Please see 
https://eslint.org/version-support for other options.
     hasBin: true
 
   eslint@8.26.0:
     resolution: {integrity: 
sha512-kzJkpaw1Bfwheq4VXUezFriD1GxszX6dUekM7Z3aC2o4hju+tsR/XyTC3RcoSD7jmy9VkPU3+N6YjVU2e96Oyg==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    deprecated: This version is no longer supported. Please see 
https://eslint.org/version-support for other options.
     hasBin: true
 
   eslint@8.56.0:
     resolution: {integrity: 
sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+    deprecated: This version is no longer supported. Please see 
https://eslint.org/version-support for other options.
     hasBin: true
 
   esm@3.2.25:
@@ -5375,10 +5379,6 @@ packages:
     resolution: {integrity: 
sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA==}
     engines: {node: '>=0.10.0'}
 
-  expand-template@2.0.3:
-    resolution: {integrity: 
sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==}
-    engines: {node: '>=6'}
-
   express@4.18.2:
     resolution: {integrity: 
sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==}
     engines: {node: '>= 0.10.0'}
@@ -5608,9 +5608,6 @@ packages:
   fromentries@1.3.2:
     resolution: {integrity: 
sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==}
 
-  fs-constants@1.0.0:
-    resolution: {integrity: 
sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==}
-
   fs-extra@11.1.0:
     resolution: {integrity: 
sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw==}
     engines: {node: '>=14.14'}
@@ -5648,7 +5645,7 @@ packages:
     resolution: {integrity: 
sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==}
     engines: {node: '>= 4.0'}
     os: [darwin]
-    deprecated: The v1 package contains DANGEROUS / INSECURE binaries. Upgrade 
to safe fsevents v2
+    deprecated: Upgrade to fsevents v2 to mitigate potential security issues
 
   fsevents@2.3.3:
     resolution: {integrity: 
sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
@@ -5740,9 +5737,6 @@ packages:
   gettext-parser@1.1.0:
     resolution: {integrity: 
sha512-zL3eayB0jF+cr6vogH/VJKoKcj7uQj2TPByaaj6a4k/3elk9iq7fiwCM2FqdzS/umo021RetSanVisarzeb9Wg==}
 
-  github-from-package@0.0.0:
-    resolution: {integrity: 
sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==}
-
   gittar@0.1.1:
     resolution: {integrity: 
sha512-p+XuqWJpW9ahUuNTptqeFjudFq31o6Jd+maMBarkMAR5U3K9c7zJB4sQ4BV8mIqrTOV29TtqikDhnZfCD4XNfQ==}
     engines: {node: '>=4'}
@@ -7160,9 +7154,6 @@ packages:
     resolution: {integrity: 
sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==}
     engines: {node: '>=0.10.0'}
 
-  mkdirp-classic@0.5.3:
-    resolution: {integrity: 
sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==}
-
   mkdirp@0.5.6:
     resolution: {integrity: 
sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==}
     hasBin: true
@@ -7258,9 +7249,6 @@ packages:
     resolution: {integrity: 
sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==}
     engines: {node: '>=0.10.0'}
 
-  napi-build-utils@1.0.2:
-    resolution: {integrity: 
sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==}
-
   native-url@0.3.4:
     resolution: {integrity: 
sha512-6iM8R99ze45ivyH8vybJ7X0yekIcPf5GgLV5K0ENCbmRcaRIDoj37BC8iLEmaaBfqqb8enuZ5p0uhY+lVAbAcA==}
 
@@ -7284,10 +7272,6 @@ packages:
   no-case@3.0.4:
     resolution: {integrity: 
sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==}
 
-  node-abi@3.62.0:
-    resolution: {integrity: 
sha512-CPMcGa+y33xuL1E0TcNIu4YyaZCxnnvkVaEXrsosR3FxN+fV8xvb7Mzpb7IgKler10qeMkE6+Dp8qJhpzdq35g==}
-    engines: {node: '>=10'}
-
   node-domexception@1.0.0:
     resolution: {integrity: 
sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
     engines: {node: '>=10.5.0'}
@@ -8199,11 +8183,6 @@ packages:
   preact@10.11.3:
     resolution: {integrity: 
sha512-eY93IVpod/zG3uMF22Unl8h9KkrcKIRs2EGar8hwLZZDU1lkjph303V9HZBwufh2s736U6VXuhD109LYqPoffg==}
 
-  prebuild-install@7.1.2:
-    resolution: {integrity: 
sha512-UnNke3IQb6sgarcZIDU3gbMeTp/9SSU1DAIkil7PrqG1vZlBtY5msYccSKSHDqa3hNg436IXK+SNImReuA1wEQ==}
-    engines: {node: '>=10'}
-    hasBin: true
-
   prelude-ls@1.1.2:
     resolution: {integrity: 
sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==}
     engines: {node: '>= 0.8.0'}
@@ -8886,12 +8865,6 @@ packages:
     resolution: {integrity: 
sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==}
     engines: {node: '>=14'}
 
-  simple-concat@1.0.1:
-    resolution: {integrity: 
sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==}
-
-  simple-get@4.0.1:
-    resolution: {integrity: 
sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==}
-
   simple-swizzle@0.2.2:
     resolution: {integrity: 
sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==}
 
@@ -9268,13 +9241,6 @@ packages:
     resolution: {integrity: 
sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==}
     engines: {node: '>=6'}
 
-  tar-fs@2.1.1:
-    resolution: {integrity: 
sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==}
-
-  tar-stream@2.2.0:
-    resolution: {integrity: 
sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==}
-    engines: {node: '>=6'}
-
   tar@4.4.19:
     resolution: {integrity: 
sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA==}
     engines: {node: '>=4.5'}
@@ -14286,12 +14252,6 @@ snapshots:
     dependencies:
       tweetnacl: 0.14.5
 
-  better-sqlite3@10.0.0:
-    dependencies:
-      bindings: 1.5.0
-      prebuild-install: 7.1.2
-    optional: true
-
   big-integer@1.6.52: {}
 
   big.js@3.2.0: {}
@@ -15524,9 +15484,6 @@ snapshots:
 
   detect-libc@2.0.2: {}
 
-  detect-libc@2.0.3:
-    optional: true
-
   detect-node@2.1.0: {}
 
   didyoumean@1.2.2: {}
@@ -16351,9 +16308,6 @@ snapshots:
     transitivePeerDependencies:
       - supports-color
 
-  expand-template@2.0.3:
-    optional: true
-
   express@4.18.2:
     dependencies:
       accepts: 1.3.8
@@ -16631,9 +16585,6 @@ snapshots:
 
   fromentries@1.3.2: {}
 
-  fs-constants@1.0.0:
-    optional: true
-
   fs-extra@11.1.0:
     dependencies:
       graceful-fs: 4.2.11
@@ -16772,9 +16723,6 @@ snapshots:
     dependencies:
       encoding: 0.1.13
 
-  github-from-package@0.0.0:
-    optional: true
-
   gittar@0.1.1:
     dependencies:
       mkdirp: 0.5.6
@@ -18209,9 +18157,6 @@ snapshots:
       for-in: 1.0.2
       is-extendable: 1.0.1
 
-  mkdirp-classic@0.5.3:
-    optional: true
-
   mkdirp@0.5.6:
     dependencies:
       minimist: 1.2.8
@@ -18353,9 +18298,6 @@ snapshots:
     transitivePeerDependencies:
       - supports-color
 
-  napi-build-utils@1.0.2:
-    optional: true
-
   native-url@0.3.4:
     dependencies:
       querystring: 0.2.1
@@ -18378,11 +18320,6 @@ snapshots:
       lower-case: 2.0.2
       tslib: 2.6.2
 
-  node-abi@3.62.0:
-    dependencies:
-      semver: 7.6.2
-    optional: true
-
   node-domexception@1.0.0: {}
 
   node-fetch@2.7.0(encoding@0.1.13):
@@ -19500,22 +19437,6 @@ snapshots:
 
   preact@10.11.3: {}
 
-  prebuild-install@7.1.2:
-    dependencies:
-      detect-libc: 2.0.3
-      expand-template: 2.0.3
-      github-from-package: 0.0.0
-      minimist: 1.2.8
-      mkdirp-classic: 0.5.3
-      napi-build-utils: 1.0.2
-      node-abi: 3.62.0
-      pump: 3.0.0
-      rc: 1.2.8
-      simple-get: 4.0.1
-      tar-fs: 2.1.1
-      tunnel-agent: 0.6.0
-    optional: true
-
   prelude-ls@1.1.2: {}
 
   prelude-ls@1.2.1: {}
@@ -20248,16 +20169,6 @@ snapshots:
 
   signal-exit@4.1.0: {}
 
-  simple-concat@1.0.1:
-    optional: true
-
-  simple-get@4.0.1:
-    dependencies:
-      decompress-response: 6.0.0
-      once: 1.4.0
-      simple-concat: 1.0.1
-    optional: true
-
   simple-swizzle@0.2.2:
     dependencies:
       is-arrayish: 0.3.2
@@ -20759,23 +20670,6 @@ snapshots:
 
   tapable@2.2.1: {}
 
-  tar-fs@2.1.1:
-    dependencies:
-      chownr: 1.1.4
-      mkdirp-classic: 0.5.3
-      pump: 3.0.0
-      tar-stream: 2.2.0
-    optional: true
-
-  tar-stream@2.2.0:
-    dependencies:
-      bl: 4.1.0
-      end-of-stream: 1.4.4
-      fs-constants: 1.0.0
-      inherits: 2.0.4
-      readable-stream: 3.6.2
-    optional: true
-
   tar@4.4.19:
     dependencies:
       chownr: 1.1.4

-- 
To stop receiving notification emails like this one, please contact
gnunet@gnunet.org.


