aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorFlorian Dold <florian@dold.me>2023-07-11 15:41:48 +0200
committerFlorian Dold <florian@dold.me>2023-08-22 08:01:13 +0200
commitb2d0ad57ddf251a109d536cdc49fb6505dbdc50c (patch)
tree7eaeca3ad8ec97a9c1970c1004feda2d61c3441b
parent58fdf9dc091b076787a9746c405fe6a9366f5da6 (diff)
sqlite3 backend for idb-bridge / wallet-core
-rw-r--r--packages/idb-bridge/package.json22
-rw-r--r--packages/idb-bridge/src/MemoryBackend.test.ts602
-rw-r--r--packages/idb-bridge/src/MemoryBackend.ts367
-rw-r--r--packages/idb-bridge/src/SqliteBackend.test.ts83
-rw-r--r--packages/idb-bridge/src/SqliteBackend.ts2301
-rw-r--r--packages/idb-bridge/src/backend-common.ts29
-rw-r--r--packages/idb-bridge/src/backend-interface.ts142
-rw-r--r--packages/idb-bridge/src/backends.test.ts740
-rw-r--r--packages/idb-bridge/src/bridge-idb.ts516
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts3
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts7
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts3
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts6
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts27
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts6
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts6
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts3
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts6
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts4
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/value.test.ts8
-rw-r--r--packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts27
-rw-r--r--packages/idb-bridge/src/idbpromutil.ts26
-rw-r--r--packages/idb-bridge/src/idbtypes.ts23
-rw-r--r--packages/idb-bridge/src/index.ts11
-rw-r--r--packages/idb-bridge/src/node-sqlite3-impl.ts84
-rw-r--r--packages/idb-bridge/src/sqlite3-interface.ts34
-rw-r--r--packages/idb-bridge/src/testingdb.ts43
-rw-r--r--packages/idb-bridge/src/util/FakeDomEvent.ts103
-rw-r--r--packages/idb-bridge/src/util/FakeEventTarget.ts2
-rw-r--r--packages/idb-bridge/src/util/extractKey.ts4
-rw-r--r--packages/idb-bridge/src/util/key-storage.test.ts39
-rw-r--r--packages/idb-bridge/src/util/key-storage.ts363
-rw-r--r--packages/idb-bridge/src/util/makeStoreKeyValue.test.ts66
-rw-r--r--packages/idb-bridge/src/util/makeStoreKeyValue.ts20
-rw-r--r--packages/idb-bridge/src/util/queueTask.ts5
-rw-r--r--packages/idb-bridge/src/util/structuredClone.test.ts61
-rw-r--r--packages/idb-bridge/src/util/structuredClone.ts231
-rw-r--r--packages/idb-bridge/src/util/valueToKey.ts6
-rw-r--r--packages/idb-bridge/tsconfig.json2
-rw-r--r--packages/taler-util/package.json1
-rw-r--r--packages/taler-util/src/index.qtart.ts27
-rw-r--r--packages/taler-util/src/transactions-types.ts3
-rw-r--r--packages/taler-util/src/wallet-types.ts7
-rw-r--r--packages/taler-wallet-cli/Makefile4
-rw-r--r--packages/taler-wallet-cli/README.md5
-rwxr-xr-xpackages/taler-wallet-cli/bin/taler-wallet-cli-local.mjs8
-rwxr-xr-xpackages/taler-wallet-cli/build-qtart.mjs2
-rw-r--r--packages/taler-wallet-core/src/db.ts5
-rw-r--r--packages/taler-wallet-core/src/host-common.ts2
-rw-r--r--packages/taler-wallet-core/src/host-impl.node.ts69
-rw-r--r--packages/taler-wallet-core/src/host-impl.qtart.ts116
-rw-r--r--packages/taler-wallet-core/src/host.ts1
-rw-r--r--packages/taler-wallet-core/src/operations/pending.ts350
-rw-r--r--packages/taler-wallet-core/src/operations/testing.ts3
-rw-r--r--packages/taler-wallet-core/src/operations/transactions.ts31
-rw-r--r--packages/taler-wallet-core/src/util/query.ts4
-rw-r--r--packages/taler-wallet-core/src/wallet.ts1
-rwxr-xr-xpackages/taler-wallet-embedded/build.mjs2
-rw-r--r--packages/taler-wallet-embedded/src/wallet-qjs.ts22
-rw-r--r--pnpm-lock.yaml474
75 files changed, 5742 insertions, 1486 deletions
diff --git a/packages/idb-bridge/package.json b/packages/idb-bridge/package.json
index 88ff8a1c2..2677c302f 100644
--- a/packages/idb-bridge/package.json
+++ b/packages/idb-bridge/package.json
@@ -18,22 +18,26 @@
"exports": {
".": {
"default": "./lib/index.js"
+ },
+ "./node-sqlite3-bindings": {
+ "default": "./lib/node-sqlite3-impl.js"
}
},
"devDependencies": {
- "@types/node": "^18.11.17",
- "ava": "^4.3.3",
- "esm": "^3.2.25",
+ "@types/better-sqlite3": "^7.6.4",
+ "@types/node": "^20.4.1",
+ "ava": "^5.3.1",
"prettier": "^2.8.8",
- "rimraf": "^3.0.2",
- "typescript": "^5.1.3"
+ "rimraf": "^5.0.1",
+ "typescript": "^5.1.6"
},
"dependencies": {
- "tslib": "^2.5.3"
+ "tslib": "^2.6.0"
},
"ava": {
- "require": [
- "esm"
- ]
+ "failFast": true
+ },
+ "optionalDependencies": {
+ "better-sqlite3": "^8.4.0"
}
}
diff --git a/packages/idb-bridge/src/MemoryBackend.test.ts b/packages/idb-bridge/src/MemoryBackend.test.ts
index 8a544a201..a851309ed 100644
--- a/packages/idb-bridge/src/MemoryBackend.test.ts
+++ b/packages/idb-bridge/src/MemoryBackend.test.ts
@@ -15,334 +15,9 @@
*/
import test from "ava";
-import {
- BridgeIDBCursorWithValue,
- BridgeIDBDatabase,
- BridgeIDBFactory,
- BridgeIDBKeyRange,
- BridgeIDBRequest,
- BridgeIDBTransaction,
-} from "./bridge-idb.js";
-import {
- IDBCursorDirection,
- IDBCursorWithValue,
- IDBDatabase,
- IDBKeyRange,
- IDBValidKey,
-} from "./idbtypes.js";
import { MemoryBackend } from "./MemoryBackend.js";
-
-function promiseFromRequest(request: BridgeIDBRequest): Promise<any> {
- return new Promise((resolve, reject) => {
- request.onsuccess = () => {
- resolve(request.result);
- };
- request.onerror = () => {
- reject(request.error);
- };
- });
-}
-
-function promiseFromTransaction(
- transaction: BridgeIDBTransaction,
-): Promise<void> {
- return new Promise<void>((resolve, reject) => {
- transaction.oncomplete = () => {
- resolve();
- };
- transaction.onerror = () => {
- reject();
- };
- });
-}
-
-test("Spec: Example 1 Part 1", async (t) => {
- const backend = new MemoryBackend();
- const idb = new BridgeIDBFactory(backend);
-
- const request = idb.open("library");
- request.onupgradeneeded = () => {
- const db = request.result;
- const store = db.createObjectStore("books", { keyPath: "isbn" });
- const titleIndex = store.createIndex("by_title", "title", { unique: true });
- const authorIndex = store.createIndex("by_author", "author");
-
- // Populate with initial data.
- store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
- store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
- store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
- };
-
- await promiseFromRequest(request);
- t.pass();
-});
-
-test("Spec: Example 1 Part 2", async (t) => {
- const backend = new MemoryBackend();
- const idb = new BridgeIDBFactory(backend);
-
- const request = idb.open("library");
- request.onupgradeneeded = () => {
- const db = request.result;
- const store = db.createObjectStore("books", { keyPath: "isbn" });
- const titleIndex = store.createIndex("by_title", "title", { unique: true });
- const authorIndex = store.createIndex("by_author", "author");
- };
-
- const db: BridgeIDBDatabase = await promiseFromRequest(request);
-
- t.is(db.name, "library");
-
- const tx = db.transaction("books", "readwrite");
- tx.oncomplete = () => {
- console.log("oncomplete called");
- };
-
- const store = tx.objectStore("books");
-
- store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
- store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
- store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
-
- await promiseFromTransaction(tx);
-
- t.pass();
-});
-
-test("Spec: Example 1 Part 3", async (t) => {
- const backend = new MemoryBackend();
- backend.enableTracing = true;
- const idb = new BridgeIDBFactory(backend);
-
- const request = idb.open("library");
- request.onupgradeneeded = () => {
- const db = request.result;
- const store = db.createObjectStore("books", { keyPath: "isbn" });
- const titleIndex = store.createIndex("by_title", "title", { unique: true });
- const authorIndex = store.createIndex("by_author", "author");
- };
-
- const db: BridgeIDBDatabase = await promiseFromRequest(request);
-
- t.is(db.name, "library");
-
- const tx = db.transaction("books", "readwrite");
-
- const store = tx.objectStore("books");
-
- store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
- store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
- store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
-
- await promiseFromTransaction(tx);
-
- const tx2 = db.transaction("books", "readonly");
- const store2 = tx2.objectStore("books");
- var index2 = store2.index("by_title");
- const request2 = index2.get("Bedrock Nights");
- const result2: any = await promiseFromRequest(request2);
-
- t.is(result2.author, "Barney");
-
- const tx3 = db.transaction(["books"], "readonly");
- const store3 = tx3.objectStore("books");
- const index3 = store3.index("by_author");
- const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
-
- await promiseFromRequest(request3);
-
- let cursor: BridgeIDBCursorWithValue | null;
- cursor = request3.result as BridgeIDBCursorWithValue;
- t.is(cursor.value.author, "Fred");
- t.is(cursor.value.isbn, 123456);
-
- cursor.continue();
-
- await promiseFromRequest(request3);
-
- cursor = request3.result as BridgeIDBCursorWithValue;
- t.is(cursor.value.author, "Fred");
- t.is(cursor.value.isbn, 234567);
-
- await promiseFromTransaction(tx3);
-
- const tx4 = db.transaction("books", "readonly");
- const store4 = tx4.objectStore("books");
- const request4 = store4.openCursor();
-
- await promiseFromRequest(request4);
-
- cursor = request4.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.isbn, 123456);
-
- cursor.continue();
-
- await promiseFromRequest(request4);
-
- cursor = request4.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.isbn, 234567);
-
- cursor.continue();
-
- await promiseFromRequest(request4);
-
- cursor = request4.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.isbn, 345678);
-
- cursor.continue();
- await promiseFromRequest(request4);
-
- cursor = request4.result;
-
- t.is(cursor, null);
-
- const tx5 = db.transaction("books", "readonly");
- const store5 = tx5.objectStore("books");
- const index5 = store5.index("by_author");
-
- const request5 = index5.openCursor(null, "next");
-
- await promiseFromRequest(request5);
- cursor = request5.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.author, "Barney");
- cursor.continue();
-
- await promiseFromRequest(request5);
- cursor = request5.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.author, "Fred");
- cursor.continue();
-
- await promiseFromRequest(request5);
- cursor = request5.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.author, "Fred");
- cursor.continue();
-
- await promiseFromRequest(request5);
- cursor = request5.result;
- t.is(cursor, null);
-
- const request6 = index5.openCursor(null, "nextunique");
-
- await promiseFromRequest(request6);
- cursor = request6.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.author, "Barney");
- cursor.continue();
-
- await promiseFromRequest(request6);
- cursor = request6.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.author, "Fred");
- t.is(cursor.value.isbn, 123456);
- cursor.continue();
-
- await promiseFromRequest(request6);
- cursor = request6.result;
- t.is(cursor, null);
-
- const request7 = index5.openCursor(null, "prevunique");
- await promiseFromRequest(request7);
- cursor = request7.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.author, "Fred");
- t.is(cursor.value.isbn, 123456);
- cursor.continue();
-
- await promiseFromRequest(request7);
- cursor = request7.result;
- if (!cursor) {
- throw new Error();
- }
- t.is(cursor.value.author, "Barney");
- cursor.continue();
-
- await promiseFromRequest(request7);
- cursor = request7.result;
- t.is(cursor, null);
-
- db.close();
-
- t.pass();
-});
-
-test("simple deletion", async (t) => {
- const backend = new MemoryBackend();
- const idb = new BridgeIDBFactory(backend);
-
- const request = idb.open("library");
- request.onupgradeneeded = () => {
- const db = request.result;
- const store = db.createObjectStore("books", { keyPath: "isbn" });
- const titleIndex = store.createIndex("by_title", "title", { unique: true });
- const authorIndex = store.createIndex("by_author", "author");
- };
-
- const db: BridgeIDBDatabase = await promiseFromRequest(request);
-
- t.is(db.name, "library");
-
- const tx = db.transaction("books", "readwrite");
- tx.oncomplete = () => {
- console.log("oncomplete called");
- };
-
- const store = tx.objectStore("books");
-
- store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
- store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
- store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
-
- await promiseFromTransaction(tx);
-
- const tx2 = db.transaction("books", "readwrite");
-
- const store2 = tx2.objectStore("books");
-
- const req1 = store2.get(234567);
- await promiseFromRequest(req1);
- t.is(req1.readyState, "done");
- t.is(req1.result.author, "Fred");
-
- store2.delete(123456);
-
- const req2 = store2.get(123456);
- await promiseFromRequest(req2);
- t.is(req2.readyState, "done");
- t.is(req2.result, undefined);
-
- const req3 = store2.get(234567);
- await promiseFromRequest(req3);
- t.is(req3.readyState, "done");
- t.is(req3.result.author, "Fred");
-
- await promiseFromTransaction(tx2);
-
- t.pass();
-});
+import { BridgeIDBDatabase, BridgeIDBFactory } from "./bridge-idb.js";
+import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js";
test("export", async (t) => {
const backend = new MemoryBackend();
@@ -386,276 +61,3 @@ test("export", async (t) => {
t.is(exportedData2.databases["library"].schema.databaseVersion, 42);
t.pass();
});
-
-test("update with non-existent index values", async (t) => {
- const backend = new MemoryBackend();
- backend.enableTracing = true;
- const idb = new BridgeIDBFactory(backend);
- const request = idb.open("mydb");
- request.onupgradeneeded = () => {
- const db = request.result;
- const store = db.createObjectStore("bla", { keyPath: "x" });
- store.createIndex("by_y", "y");
- store.createIndex("by_z", "z");
- };
-
- const db: BridgeIDBDatabase = await promiseFromRequest(request);
-
- t.is(db.name, "mydb");
-
- {
- const tx = db.transaction("bla", "readwrite");
- const store = tx.objectStore("bla");
- store.put({ x: 0, y: "a", z: 42 });
- const index = store.index("by_z");
- const indRes = await promiseFromRequest(index.get(42));
- t.is(indRes.x, 0);
- const res = await promiseFromRequest(store.get(0));
- t.is(res.z, 42);
- await promiseFromTransaction(tx);
- }
-
- {
- const tx = db.transaction("bla", "readwrite");
- const store = tx.objectStore("bla");
- store.put({ x: 0, y: "a" });
- const res = await promiseFromRequest(store.get(0));
- t.is(res.z, undefined);
- await promiseFromTransaction(tx);
- }
-
- {
- const tx = db.transaction("bla", "readwrite");
- const store = tx.objectStore("bla");
- const index = store.index("by_z");
- {
- const indRes = await promiseFromRequest(index.get(42));
- t.is(indRes, undefined);
- }
- const res = await promiseFromRequest(store.get(0));
- t.is(res.z, undefined);
- await promiseFromTransaction(tx);
- }
-
- t.pass();
-});
-
-test("delete from unique index", async (t) => {
- const backend = new MemoryBackend();
- backend.enableTracing = true;
- const idb = new BridgeIDBFactory(backend);
- const request = idb.open("mydb");
- request.onupgradeneeded = () => {
- const db = request.result as IDBDatabase;
- const store = db.createObjectStore("bla", { keyPath: "x" });
- store.createIndex("by_yz", ["y", "z"], {
- unique: true,
- });
- };
-
- const db: BridgeIDBDatabase = await promiseFromRequest(request);
-
- t.is(db.name, "mydb");
-
- {
- const tx = db.transaction("bla", "readwrite");
- const store = tx.objectStore("bla");
- store.put({ x: 0, y: "a", z: 42 });
- const index = store.index("by_yz");
- const indRes = await promiseFromRequest(index.get(["a", 42]));
- t.is(indRes.x, 0);
- const res = await promiseFromRequest(store.get(0));
- t.is(res.z, 42);
- await promiseFromTransaction(tx);
- }
-
- {
- const tx = db.transaction("bla", "readwrite");
- const store = tx.objectStore("bla");
- store.put({ x: 0, y: "a", z: 42, extra: 123 });
- await promiseFromTransaction(tx);
- }
-
- t.pass();
-});
-
-test("range queries", async (t) => {
- const backend = new MemoryBackend();
- backend.enableTracing = true;
- const idb = new BridgeIDBFactory(backend);
-
- const request = idb.open("mydb");
- request.onupgradeneeded = () => {
- const db = request.result;
- const store = db.createObjectStore("bla", { keyPath: "x" });
- store.createIndex("by_y", "y");
- store.createIndex("by_z", "z");
- };
-
- const db: BridgeIDBDatabase = await promiseFromRequest(request);
-
- t.is(db.name, "mydb");
-
- const tx = db.transaction("bla", "readwrite");
-
- const store = tx.objectStore("bla");
-
- store.put({ x: 0, y: "a" });
- store.put({ x: 2, y: "a" });
- store.put({ x: 4, y: "b" });
- store.put({ x: 8, y: "b" });
- store.put({ x: 10, y: "c" });
- store.put({ x: 12, y: "c" });
-
- await promiseFromTransaction(tx);
-
- async function doCursorStoreQuery(
- range: IDBKeyRange | IDBValidKey | undefined,
- direction: IDBCursorDirection | undefined,
- expected: any[],
- ): Promise<void> {
- const tx = db.transaction("bla", "readwrite");
- const store = tx.objectStore("bla");
- const vals: any[] = [];
-
- const req = store.openCursor(range, direction);
- while (1) {
- await promiseFromRequest(req);
- const cursor: IDBCursorWithValue = req.result;
- if (!cursor) {
- break;
- }
- cursor.continue();
- vals.push(cursor.value);
- }
-
- await promiseFromTransaction(tx);
-
- t.deepEqual(vals, expected);
- }
-
- async function doCursorIndexQuery(
- range: IDBKeyRange | IDBValidKey | undefined,
- direction: IDBCursorDirection | undefined,
- expected: any[],
- ): Promise<void> {
- const tx = db.transaction("bla", "readwrite");
- const store = tx.objectStore("bla");
- const index = store.index("by_y");
- const vals: any[] = [];
-
- const req = index.openCursor(range, direction);
- while (1) {
- await promiseFromRequest(req);
- const cursor: IDBCursorWithValue = req.result;
- if (!cursor) {
- break;
- }
- cursor.continue();
- vals.push(cursor.value);
- }
-
- await promiseFromTransaction(tx);
-
- t.deepEqual(vals, expected);
- }
-
- await doCursorStoreQuery(undefined, undefined, [
- {
- x: 0,
- y: "a",
- },
- {
- x: 2,
- y: "a",
- },
- {
- x: 4,
- y: "b",
- },
- {
- x: 8,
- y: "b",
- },
- {
- x: 10,
- y: "c",
- },
- {
- x: 12,
- y: "c",
- },
- ]);
-
- await doCursorStoreQuery(
- BridgeIDBKeyRange.bound(0, 12, true, true),
- undefined,
- [
- {
- x: 2,
- y: "a",
- },
- {
- x: 4,
- y: "b",
- },
- {
- x: 8,
- y: "b",
- },
- {
- x: 10,
- y: "c",
- },
- ],
- );
-
- await doCursorIndexQuery(
- BridgeIDBKeyRange.bound("a", "c", true, true),
- undefined,
- [
- {
- x: 4,
- y: "b",
- },
- {
- x: 8,
- y: "b",
- },
- ],
- );
-
- await doCursorIndexQuery(undefined, "nextunique", [
- {
- x: 0,
- y: "a",
- },
- {
- x: 4,
- y: "b",
- },
- {
- x: 10,
- y: "c",
- },
- ]);
-
- await doCursorIndexQuery(undefined, "prevunique", [
- {
- x: 10,
- y: "c",
- },
- {
- x: 4,
- y: "b",
- },
- {
- x: 0,
- y: "a",
- },
- ]);
-
- db.close();
-
- t.pass();
-});
diff --git a/packages/idb-bridge/src/MemoryBackend.ts b/packages/idb-bridge/src/MemoryBackend.ts
index f40f1c98b..526920a9f 100644
--- a/packages/idb-bridge/src/MemoryBackend.ts
+++ b/packages/idb-bridge/src/MemoryBackend.ts
@@ -14,43 +14,38 @@
permissions and limitations under the License.
*/
+import { AsyncCondition, TransactionLevel } from "./backend-common.js";
import {
Backend,
+ ConnectResult,
DatabaseConnection,
DatabaseTransaction,
- Schema,
- RecordStoreRequest,
- IndexProperties,
- RecordGetRequest,
+ IndexGetQuery,
+ IndexMeta,
+ ObjectStoreGetQuery,
+ ObjectStoreMeta,
RecordGetResponse,
+ RecordStoreRequest,
+ RecordStoreResponse,
ResultLevel,
StoreLevel,
- RecordStoreResponse,
} from "./backend-interface.js";
+import { BridgeIDBKeyRange } from "./bridge-idb.js";
+import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js";
+import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js";
+import { compareKeys } from "./util/cmp.js";
+import { ConstraintError, DataError } from "./util/errors.js";
+import { getIndexKeys } from "./util/getIndexKeys.js";
+import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
import {
structuredClone,
structuredEncapsulate,
structuredRevive,
} from "./util/structuredClone.js";
-import { ConstraintError, DataError } from "./util/errors.js";
-import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js";
-import { compareKeys } from "./util/cmp.js";
-import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
-import { getIndexKeys } from "./util/getIndexKeys.js";
-import { openPromise } from "./util/openPromise.js";
-import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js";
-import { BridgeIDBKeyRange } from "./bridge-idb.js";
type Key = IDBValidKey;
type Value = unknown;
-enum TransactionLevel {
- None = 0,
- Read = 1,
- Write = 2,
- VersionChange = 3,
-}
-
interface ObjectStore {
originalName: string;
modifiedName: string | undefined;
@@ -95,24 +90,39 @@ interface Database {
connectionCookies: string[];
}
-/** @public */
export interface ObjectStoreDump {
name: string;
keyGenerator: number;
records: ObjectStoreRecord[];
}
-/** @public */
export interface DatabaseDump {
schema: Schema;
objectStores: { [name: string]: ObjectStoreDump };
}
-/** @public */
export interface MemoryBackendDump {
databases: { [name: string]: DatabaseDump };
}
+export interface ObjectStoreProperties {
+ keyPath: string | string[] | null;
+ autoIncrement: boolean;
+ indexes: { [nameame: string]: IndexProperties };
+}
+
+export interface IndexProperties {
+ keyPath: string | string[];
+ multiEntry: boolean;
+ unique: boolean;
+}
+
+export interface Schema {
+ databaseName: string;
+ databaseVersion: number;
+ objectStores: { [name: string]: ObjectStoreProperties };
+}
+
interface ObjectStoreMapEntry {
store: ObjectStore;
indexMap: { [currentName: string]: Index };
@@ -142,27 +152,6 @@ export interface ObjectStoreRecord {
value: Value;
}
-class AsyncCondition {
- _waitPromise: Promise<void>;
- _resolveWaitPromise: () => void;
- constructor() {
- const op = openPromise<void>();
- this._waitPromise = op.promise;
- this._resolveWaitPromise = op.resolve;
- }
-
- wait(): Promise<void> {
- return this._waitPromise;
- }
-
- trigger(): void {
- this._resolveWaitPromise();
- const op = openPromise<void>();
- this._waitPromise = op.promise;
- this._resolveWaitPromise = op.resolve;
- }
-}
-
function nextStoreKey<T>(
forward: boolean,
data: ISortedMapF<Key, ObjectStoreRecord>,
@@ -178,12 +167,6 @@ function nextStoreKey<T>(
return res[1].primaryKey;
}
-function assertInvariant(cond: boolean): asserts cond {
- if (!cond) {
- throw Error("invariant failed");
- }
-}
-
function nextKey(
forward: boolean,
tree: ISortedSetF<IDBValidKey>,
@@ -230,6 +213,7 @@ function furthestKey(
}
export interface AccessStats {
+ primitiveStatements: number;
writeTransactions: number;
readTransactions: number;
writesPerStore: Record<string, number>;
@@ -279,6 +263,7 @@ export class MemoryBackend implements Backend {
trackStats: boolean = true;
accessStats: AccessStats = {
+ primitiveStatements: 0,
readTransactions: 0,
writeTransactions: 0,
readsPerStore: {},
@@ -459,7 +444,7 @@ export class MemoryBackend implements Backend {
delete this.databases[name];
}
- async connectDatabase(name: string): Promise<DatabaseConnection> {
+ async connectDatabase(name: string): Promise<ConnectResult> {
if (this.enableTracing) {
console.log(`TRACING: connectDatabase(${name})`);
}
@@ -498,7 +483,11 @@ export class MemoryBackend implements Backend {
this.connections[connectionCookie] = myConn;
- return { connectionCookie };
+ return {
+ conn: { connectionCookie },
+ version: database.committedSchema.databaseVersion,
+ objectStores: Object.keys(database.committedSchema.objectStores).sort(),
+ };
}
async beginTransaction(
@@ -601,14 +590,6 @@ export class MemoryBackend implements Backend {
this.disconnectCond.trigger();
}
- private requireConnection(dbConn: DatabaseConnection): Connection {
- const myConn = this.connections[dbConn.connectionCookie];
- if (!myConn) {
- throw Error(`unknown connection (${dbConn.connectionCookie})`);
- }
- return myConn;
- }
-
private requireConnectionFromTransaction(
btx: DatabaseTransaction,
): Connection {
@@ -619,36 +600,6 @@ export class MemoryBackend implements Backend {
return myConn;
}
- getSchema(dbConn: DatabaseConnection): Schema {
- if (this.enableTracing) {
- console.log(`TRACING: getSchema`);
- }
- const myConn = this.requireConnection(dbConn);
- const db = this.databases[myConn.dbName];
- if (!db) {
- throw Error("db not found");
- }
- return db.committedSchema;
- }
-
- getCurrentTransactionSchema(btx: DatabaseTransaction): Schema {
- const myConn = this.requireConnectionFromTransaction(btx);
- const db = this.databases[myConn.dbName];
- if (!db) {
- throw Error("db not found");
- }
- return myConn.modifiedSchema;
- }
-
- getInitialTransactionSchema(btx: DatabaseTransaction): Schema {
- const myConn = this.requireConnectionFromTransaction(btx);
- const db = this.databases[myConn.dbName];
- if (!db) {
- throw Error("db not found");
- }
- return db.committedSchema;
- }
-
renameIndex(
btx: DatabaseTransaction,
objectStoreName: string,
@@ -799,7 +750,7 @@ export class MemoryBackend implements Backend {
createObjectStore(
btx: DatabaseTransaction,
name: string,
- keyPath: string[] | null,
+ keyPath: string | string[] | null,
autoIncrement: boolean,
): void {
if (this.enableTracing) {
@@ -842,7 +793,7 @@ export class MemoryBackend implements Backend {
btx: DatabaseTransaction,
indexName: string,
objectStoreName: string,
- keyPath: string[],
+ keyPath: string | string[],
multiEntry: boolean,
unique: boolean,
): void {
@@ -1102,12 +1053,91 @@ export class MemoryBackend implements Backend {
}
}
- async getRecords(
+ async getObjectStoreRecords(
+ btx: DatabaseTransaction,
+ req: ObjectStoreGetQuery,
+ ): Promise<RecordGetResponse> {
+ if (this.enableTracing) {
+ console.log(`TRACING: getObjectStoreRecords`);
+ console.log("query", req);
+ }
+ const myConn = this.requireConnectionFromTransaction(btx);
+ const db = this.databases[myConn.dbName];
+ if (!db) {
+ throw Error("db not found");
+ }
+ if (db.txLevel < TransactionLevel.Read) {
+ throw Error("only allowed while running a transaction");
+ }
+ if (
+ db.txRestrictObjectStores &&
+ !db.txRestrictObjectStores.includes(req.objectStoreName)
+ ) {
+ throw Error(
+ `Not allowed to access store '${
+ req.objectStoreName
+ }', transaction is over ${JSON.stringify(db.txRestrictObjectStores)}`,
+ );
+ }
+ const objectStoreMapEntry = myConn.objectStoreMap[req.objectStoreName];
+ if (!objectStoreMapEntry) {
+ throw Error("object store not found");
+ }
+
+ let range;
+ if (req.range == null) {
+ range = new BridgeIDBKeyRange(undefined, undefined, true, true);
+ } else {
+ range = req.range;
+ }
+
+ if (typeof range !== "object") {
+ throw Error(
+ "getObjectStoreRecords was given an invalid range (sanity check failed, not an object)",
+ );
+ }
+
+ if (!("lowerOpen" in range)) {
+ throw Error(
+ "getObjectStoreRecords was given an invalid range (sanity check failed, lowerOpen missing)",
+ );
+ }
+
+ const forward: boolean =
+ req.direction === "next" || req.direction === "nextunique";
+
+ const storeData =
+ objectStoreMapEntry.store.modifiedData ||
+ objectStoreMapEntry.store.originalData;
+
+ const resp = getObjectStoreRecords({
+ forward,
+ storeData,
+ limit: req.limit,
+ range,
+ resultLevel: req.resultLevel,
+ advancePrimaryKey: req.advancePrimaryKey,
+ lastObjectStorePosition: req.lastObjectStorePosition,
+ });
+ if (this.trackStats) {
+ const k = `${req.objectStoreName}`;
+ this.accessStats.readsPerStore[k] =
+ (this.accessStats.readsPerStore[k] ?? 0) + 1;
+ this.accessStats.readItemsPerStore[k] =
+ (this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
+ }
+ if (this.enableTracing) {
+ console.log(`TRACING: getRecords got ${resp.count} results`);
+ }
+ return resp;
+ }
+
+ async getIndexRecords(
btx: DatabaseTransaction,
- req: RecordGetRequest,
+ req: IndexGetQuery,
): Promise<RecordGetResponse> {
if (this.enableTracing) {
- console.log(`TRACING: getRecords`);
+ console.log(`TRACING: getIndexRecords`);
console.log("query", req);
}
const myConn = this.requireConnectionFromTransaction(btx);
@@ -1161,58 +1191,31 @@ export class MemoryBackend implements Backend {
objectStoreMapEntry.store.modifiedData ||
objectStoreMapEntry.store.originalData;
- const haveIndex = req.indexName !== undefined;
-
- let resp: RecordGetResponse;
-
- if (haveIndex) {
- const index =
- myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!];
- const indexData = index.modifiedData || index.originalData;
- resp = getIndexRecords({
- forward,
- indexData,
- storeData,
- limit: req.limit,
- unique,
- range,
- resultLevel: req.resultLevel,
- advanceIndexKey: req.advanceIndexKey,
- advancePrimaryKey: req.advancePrimaryKey,
- lastIndexPosition: req.lastIndexPosition,
- lastObjectStorePosition: req.lastObjectStorePosition,
- });
- if (this.trackStats) {
- const k = `${req.objectStoreName}.${req.indexName}`;
- this.accessStats.readsPerIndex[k] =
- (this.accessStats.readsPerIndex[k] ?? 0) + 1;
- this.accessStats.readItemsPerIndex[k] =
- (this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count;
- }
- } else {
- if (req.advanceIndexKey !== undefined) {
- throw Error("unsupported request");
- }
- resp = getObjectStoreRecords({
- forward,
- storeData,
- limit: req.limit,
- range,
- resultLevel: req.resultLevel,
- advancePrimaryKey: req.advancePrimaryKey,
- lastIndexPosition: req.lastIndexPosition,
- lastObjectStorePosition: req.lastObjectStorePosition,
- });
- if (this.trackStats) {
- const k = `${req.objectStoreName}`;
- this.accessStats.readsPerStore[k] =
- (this.accessStats.readsPerStore[k] ?? 0) + 1;
- this.accessStats.readItemsPerStore[k] =
- (this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
- }
+ const index =
+ myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!];
+ const indexData = index.modifiedData || index.originalData;
+ const resp = getIndexRecords({
+ forward,
+ indexData,
+ storeData,
+ limit: req.limit,
+ unique,
+ range,
+ resultLevel: req.resultLevel,
+ advanceIndexKey: req.advanceIndexKey,
+ advancePrimaryKey: req.advancePrimaryKey,
+ lastIndexPosition: req.lastIndexPosition,
+ lastObjectStorePosition: req.lastObjectStorePosition,
+ });
+ if (this.trackStats) {
+ const k = `${req.objectStoreName}.${req.indexName}`;
+ this.accessStats.readsPerIndex[k] =
+ (this.accessStats.readsPerIndex[k] ?? 0) + 1;
+ this.accessStats.readItemsPerIndex[k] =
+ (this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count;
}
if (this.enableTracing) {
- console.log(`TRACING: getRecords got ${resp.count} results`);
+ console.log(`TRACING: getIndexRecords got ${resp.count} results`);
}
return resp;
}
@@ -1294,13 +1297,13 @@ export class MemoryBackend implements Backend {
let storeKeyResult: StoreKeyResult;
try {
- storeKeyResult = makeStoreKeyValue(
- storeReq.value,
- storeReq.key,
- keygen,
- autoIncrement,
- keyPath,
- );
+ storeKeyResult = makeStoreKeyValue({
+ value: storeReq.value,
+ key: storeReq.key,
+ currentKeyGenerator: keygen,
+ autoIncrement: autoIncrement,
+ keyPath: keyPath,
+ });
} catch (e) {
if (e instanceof DataError) {
const kp = JSON.stringify(keyPath);
@@ -1445,7 +1448,7 @@ export class MemoryBackend implements Backend {
}
}
- async rollback(btx: DatabaseTransaction): Promise<void> {
+ rollback(btx: DatabaseTransaction): void {
if (this.enableTracing) {
console.log(`TRACING: rollback`);
}
@@ -1536,6 +1539,57 @@ export class MemoryBackend implements Backend {
await this.afterCommitCallback();
}
}
+
+ getObjectStoreMeta(
+ dbConn: DatabaseConnection,
+ objectStoreName: string,
+ ): ObjectStoreMeta | undefined {
+ const conn = this.connections[dbConn.connectionCookie];
+ if (!conn) {
+ throw Error("db connection not found");
+ }
+ let schema = conn.modifiedSchema;
+ if (!schema) {
+ throw Error();
+ }
+ const storeInfo = schema.objectStores[objectStoreName];
+ if (!storeInfo) {
+ return undefined;
+ }
+ return {
+ autoIncrement: storeInfo.autoIncrement,
+ indexSet: Object.keys(storeInfo.indexes).sort(),
+ keyPath: structuredClone(storeInfo.keyPath),
+ };
+ }
+
+ getIndexMeta(
+ dbConn: DatabaseConnection,
+ objectStoreName: string,
+ indexName: string,
+ ): IndexMeta | undefined {
+ const conn = this.connections[dbConn.connectionCookie];
+ if (!conn) {
+ throw Error("db connection not found");
+ }
+ let schema = conn.modifiedSchema;
+ if (!schema) {
+ throw Error();
+ }
+ const storeInfo = schema.objectStores[objectStoreName];
+ if (!storeInfo) {
+ return undefined;
+ }
+ const indexInfo = storeInfo.indexes[indexName];
+ if (!indexInfo) {
+ return;
+ }
+ return {
+ keyPath: structuredClone(indexInfo.keyPath),
+ multiEntry: indexInfo.multiEntry,
+ unique: indexInfo.unique,
+ };
+ }
}
function getIndexRecords(req: {
@@ -1734,7 +1788,6 @@ function getIndexRecords(req: {
function getObjectStoreRecords(req: {
storeData: ISortedMapF<IDBValidKey, ObjectStoreRecord>;
- lastIndexPosition?: IDBValidKey;
forward: boolean;
range: IDBKeyRange;
lastObjectStorePosition?: IDBValidKey;
@@ -1743,7 +1796,6 @@ function getObjectStoreRecords(req: {
resultLevel: ResultLevel;
}): RecordGetResponse {
let numResults = 0;
- const indexKeys: Key[] = [];
const primaryKeys: Key[] = [];
const values: Value[] = [];
const { storeData, range, forward } = req;
@@ -1751,8 +1803,7 @@ function getObjectStoreRecords(req: {
function packResult(): RecordGetResponse {
return {
count: numResults,
- indexKeys:
- req.resultLevel >= ResultLevel.OnlyKeys ? indexKeys : undefined,
+ indexKeys: undefined,
primaryKeys:
req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
values: req.resultLevel >= ResultLevel.Full ? values : undefined,
@@ -1762,8 +1813,8 @@ function getObjectStoreRecords(req: {
const rangeStart = forward ? range.lower : range.upper;
const dataStart = forward ? storeData.minKey() : storeData.maxKey();
let storePos = req.lastObjectStorePosition;
- storePos = furthestKey(forward, storePos, rangeStart);
storePos = furthestKey(forward, storePos, dataStart);
+ storePos = furthestKey(forward, storePos, rangeStart);
storePos = furthestKey(forward, storePos, req.advancePrimaryKey);
if (storePos != null) {
diff --git a/packages/idb-bridge/src/SqliteBackend.test.ts b/packages/idb-bridge/src/SqliteBackend.test.ts
new file mode 100644
index 000000000..612cb9d4b
--- /dev/null
+++ b/packages/idb-bridge/src/SqliteBackend.test.ts
@@ -0,0 +1,83 @@
+/*
+ Copyright 2019 Florian Dold
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ or implied. See the License for the specific language governing
+ permissions and limitations under the License.
+ */
+
+import test from "ava";
+import { createSqliteBackend } from "./SqliteBackend.js";
+import { ResultLevel, StoreLevel } from "./backend-interface.js";
+import { BridgeIDBKeyRange } from "./bridge-idb.js";
+import * as fs from "node:fs";
+import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+
+test("sqlite3 backend", async (t) => {
+ const filename = "mytestdb.sqlite3";
+ try {
+ fs.unlinkSync(filename);
+ } catch (e) {
+ // Do nothing.
+ }
+ try {
+ const sqlite3Impl = await createNodeSqlite3Impl();
+ const backend = await createSqliteBackend(sqlite3Impl, {
+ filename,
+ });
+ const dbConnRes = await backend.connectDatabase("mydb");
+ const dbConn = dbConnRes.conn;
+ const tx = await backend.enterVersionChange(dbConn, 1);
+ backend.createObjectStore(tx, "books", "isbn", true);
+ backend.createIndex(tx, "byName", "books", "name", false, false);
+ await backend.storeRecord(tx, {
+ objectStoreName: "books",
+ storeLevel: StoreLevel.AllowOverwrite,
+ value: { name: "foo" },
+ key: undefined,
+ });
+ const res = await backend.getObjectStoreRecords(tx, {
+ direction: "next",
+ limit: 1,
+ objectStoreName: "books",
+ resultLevel: ResultLevel.Full,
+ range: BridgeIDBKeyRange.only(1),
+ });
+ t.deepEqual(res.count, 1);
+ t.deepEqual(res.primaryKeys![0], 1);
+ t.deepEqual(res.values![0].name, "foo");
+
+ const indexRes = await backend.getIndexRecords(tx, {
+ direction: "next",
+ limit: 1,
+ objectStoreName: "books",
+ indexName: "byName",
+ resultLevel: ResultLevel.Full,
+ range: BridgeIDBKeyRange.only("foo"),
+ });
+
+ t.deepEqual(indexRes.count, 1);
+ t.deepEqual(indexRes.values![0].isbn, 1);
+ t.deepEqual(indexRes.values![0].name, "foo");
+
+ await backend.commit(tx);
+
+ const tx2 = await backend.beginTransaction(dbConn, ["books"], "readwrite");
+ await backend.commit(tx2);
+
+ await backend.close(dbConn);
+
+ t.pass();
+ } catch (e: any) {
+ console.log(e);
+ throw e;
+ }
+});
diff --git a/packages/idb-bridge/src/SqliteBackend.ts b/packages/idb-bridge/src/SqliteBackend.ts
new file mode 100644
index 000000000..c40281861
--- /dev/null
+++ b/packages/idb-bridge/src/SqliteBackend.ts
@@ -0,0 +1,2301 @@
+/*
+ Copyright 2023 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
+ */
+
+/**
+ * Imports.
+ */
+import { AsyncCondition } from "./backend-common.js";
+import {
+ Backend,
+ ConnectResult,
+ DatabaseConnection,
+ DatabaseTransaction,
+ IndexGetQuery,
+ IndexMeta,
+ ObjectStoreGetQuery,
+ ObjectStoreMeta,
+ RecordGetResponse,
+ RecordStoreRequest,
+ RecordStoreResponse,
+ ResultLevel,
+ StoreLevel,
+} from "./backend-interface.js";
+import { BridgeIDBDatabaseInfo, BridgeIDBKeyRange } from "./bridge-idb.js";
+import {
+ IDBKeyPath,
+ IDBKeyRange,
+ IDBTransactionMode,
+ IDBValidKey,
+} from "./idbtypes.js";
+import {
+ AccessStats,
+ structuredEncapsulate,
+ structuredRevive,
+} from "./index.js";
+import { ConstraintError, DataError } from "./util/errors.js";
+import { getIndexKeys } from "./util/getIndexKeys.js";
+import { deserializeKey, serializeKey } from "./util/key-storage.js";
+import { makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
+import {
+ Sqlite3Database,
+ Sqlite3Interface,
+ Sqlite3Statement,
+} from "./sqlite3-interface.js";
+
+function assertDbInvariant(b: boolean): asserts b {
+ if (!b) {
+ throw Error("internal invariant failed");
+ }
+}
+
+const SqliteError = {
+ constraintPrimarykey: "SQLITE_CONSTRAINT_PRIMARYKEY",
+} as const;
+
+export type SqliteRowid = number | bigint;
+
+enum TransactionLevel {
+ None = 0,
+ Read = 1,
+ Write = 2,
+ VersionChange = 3,
+}
+
+interface ConnectionInfo {
+ // Database that the connection has
+ // connected to.
+ databaseName: string;
+}
+
+interface TransactionInfo {
+ connectionCookie: string;
+}
+
+interface ScopeIndexInfo {
+ indexId: SqliteRowid;
+ keyPath: IDBKeyPath | IDBKeyPath[];
+ multiEntry: boolean;
+ unique: boolean;
+}
+
+interface ScopeInfo {
+ /**
+ * Internal ID of the object store.
+ * Used for fast retrieval, since it's the
+ * primary key / rowid of the sqlite table.
+ */
+ objectStoreId: SqliteRowid;
+
+ indexMap: Map<string, ScopeIndexInfo>;
+}
+
+interface IndexIterPos {
+ objectPos: Uint8Array;
+ indexPos: Uint8Array;
+}
+
+export function serializeKeyPath(
+ keyPath: string | string[] | null,
+): string | null {
+ if (Array.isArray(keyPath)) {
+ return "," + keyPath.join(",");
+ }
+ return keyPath;
+}
+
+export function deserializeKeyPath(
+ dbKeyPath: string | null,
+): string | string[] | null {
+ if (dbKeyPath == null) {
+ return null;
+ }
+ if (dbKeyPath[0] === ",") {
+ const elems = dbKeyPath.split(",");
+ elems.splice(0, 1);
+ return elems;
+ } else {
+ return dbKeyPath;
+ }
+}
+
+interface Boundary {
+ key: Uint8Array;
+ inclusive: boolean;
+}
+
+function getRangeEndBoundary(
+ forward: boolean,
+ range: IDBKeyRange | undefined | null,
+): Boundary | undefined {
+ let endRangeKey: Uint8Array | undefined = undefined;
+ let endRangeInclusive: boolean = false;
+ if (range) {
+ if (forward && range.upper != null) {
+ endRangeKey = serializeKey(range.upper);
+ endRangeInclusive = !range.upperOpen;
+ } else if (!forward && range.lower != null) {
+ endRangeKey = serializeKey(range.lower);
+ endRangeInclusive = !range.lowerOpen;
+ }
+ }
+ if (endRangeKey) {
+ return {
+ inclusive: endRangeInclusive,
+ key: endRangeKey,
+ };
+ }
+ return undefined;
+}
+
+function isOutsideBoundary(
+ forward: boolean,
+ endRange: Boundary,
+ currentKey: Uint8Array,
+): boolean {
+ const cmp = compareSerializedKeys(currentKey, endRange.key);
+ if (forward && endRange.inclusive && cmp > 0) {
+ return true;
+ } else if (forward && !endRange.inclusive && cmp >= 0) {
+ return true;
+ } else if (!forward && endRange.inclusive && cmp < 0) {
+ return true;
+ } else if (!forward && !endRange.inclusive && cmp <= 0) {
+ return true;
+ }
+ return false;
+}
+
+function compareSerializedKeys(k1: Uint8Array, k2: Uint8Array): number {
+ // FIXME: Simplify!
+ let i = 0;
+ while (1) {
+ let x1 = i >= k1.length ? -1 : k1[i];
+ let x2 = i >= k2.length ? -1 : k2[i];
+ if (x1 < x2) {
+ return -1;
+ }
+ if (x1 > x2) {
+ return 1;
+ }
+ if (x1 < 0 && x2 < 0) {
+ return 0;
+ }
+ i++;
+ }
+ throw Error("not reached");
+}
+
+export function expectDbNumber(
+ resultRow: unknown,
+ name: string,
+): number | bigint {
+ assertDbInvariant(typeof resultRow === "object" && resultRow != null);
+ const res = (resultRow as any)[name];
+ if (typeof res !== "number") {
+ throw Error("unexpected type from database");
+ }
+ return res;
+}
+
+export function expectDbString(resultRow: unknown, name: string): string {
+ assertDbInvariant(typeof resultRow === "object" && resultRow != null);
+ const res = (resultRow as any)[name];
+ if (typeof res !== "string") {
+ throw Error("unexpected type from database");
+ }
+ return res;
+}
+
+export function expectDbStringOrNull(
+ resultRow: unknown,
+ name: string,
+): string | null {
+ assertDbInvariant(typeof resultRow === "object" && resultRow != null);
+ const res = (resultRow as any)[name];
+ if (res == null) {
+ return null;
+ }
+ if (typeof res !== "string") {
+ throw Error("unexpected type from database");
+ }
+ return res;
+}
+
+export class SqliteBackend implements Backend {
+ private connectionIdCounter = 1;
+ private transactionIdCounter = 1;
+
+ trackStats = false;
+
+ accessStats: AccessStats = {
+ primitiveStatements: 0, // Counted by the sqlite impl
+ readTransactions: 0,
+ writeTransactions: 0,
+ readsPerStore: {},
+ readsPerIndex: {},
+ readItemsPerIndex: {},
+ readItemsPerStore: {},
+ writesPerStore: {},
+ };
+
+ /**
+ * Condition that is triggered whenever a transaction finishes.
+ */
+ private transactionDoneCond: AsyncCondition = new AsyncCondition();
+
+ /**
+ * Is the connection blocked because either an open request
+ * or delete request is being processed?
+ */
+ private connectionBlocked: boolean = false;
+
+ private txLevel: TransactionLevel = TransactionLevel.None;
+
+ private txScope: Map<string, ScopeInfo> = new Map();
+
+ private connectionMap: Map<string, ConnectionInfo> = new Map();
+
+ private transactionMap: Map<string, TransactionInfo> = new Map();
+
+ private sqlPrepCache: Map<string, Sqlite3Statement> = new Map();
+
+ enableTracing: boolean = true;
+
+ constructor(
+ public sqliteImpl: Sqlite3Interface,
+ public db: Sqlite3Database,
+ ) {}
+
+ private _prep(sql: string): Sqlite3Statement {
+ const stmt = this.sqlPrepCache.get(sql);
+ if (stmt) {
+ return stmt;
+ }
+ const newStmt = this.db.prepare(sql);
+ this.sqlPrepCache.set(sql, newStmt);
+ return newStmt;
+ }
+
+ async getIndexRecords(
+ btx: DatabaseTransaction,
+ req: IndexGetQuery,
+ ): Promise<RecordGetResponse> {
+ const txInfo = this.transactionMap.get(btx.transactionCookie);
+ if (!txInfo) {
+ throw Error("transaction not found");
+ }
+ const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+ if (!connInfo) {
+ throw Error("connection not found");
+ }
+ if (this.txLevel < TransactionLevel.Read) {
+ throw Error("only allowed in read transaction");
+ }
+ const scopeInfo = this.txScope.get(req.objectStoreName);
+ if (!scopeInfo) {
+ throw Error("object store not in scope");
+ }
+ const indexInfo = scopeInfo.indexMap.get(req.indexName);
+ if (!indexInfo) {
+ throw Error("index not found");
+ }
+ if (req.advancePrimaryKey != null) {
+ if (req.advanceIndexKey == null) {
+ throw Error(
+ "invalid request (advancePrimaryKey without advanceIndexKey)",
+ );
+ }
+ }
+
+ if (this.enableTracing) {
+ console.log(
+ `querying index os=${req.objectStoreName}, idx=${req.indexName}, direction=${req.direction}`,
+ );
+ }
+
+ const forward: boolean =
+ req.direction === "next" || req.direction === "nextunique";
+
+ const queryUnique =
+ req.direction === "nextunique" || req.direction === "prevunique";
+
+ const indexId = indexInfo.indexId;
+ const indexUnique = indexInfo.unique;
+
+ let numResults = 0;
+ const encPrimaryKeys: Uint8Array[] = [];
+ const encIndexKeys: Uint8Array[] = [];
+ const indexKeys: IDBValidKey[] = [];
+ const primaryKeys: IDBValidKey[] = [];
+ const values: unknown[] = [];
+
+ const endRange = getRangeEndBoundary(forward, req.range);
+
+ const backendThis = this;
+
+ function packResult() {
+ if (req.resultLevel > ResultLevel.OnlyCount) {
+ for (let i = 0; i < encPrimaryKeys.length; i++) {
+ primaryKeys.push(deserializeKey(encPrimaryKeys[i]));
+ }
+ for (let i = 0; i < encIndexKeys.length; i++) {
+ indexKeys.push(deserializeKey(encIndexKeys[i]));
+ }
+ if (req.resultLevel === ResultLevel.Full) {
+ for (let i = 0; i < encPrimaryKeys.length; i++) {
+ const val = backendThis._getObjectValue(
+ scopeInfo!.objectStoreId,
+ encPrimaryKeys[i],
+ );
+ if (!val) {
+ throw Error("invariant failed: value not found");
+ }
+ values.push(structuredRevive(JSON.parse(val)));
+ }
+ }
+ }
+
+ if (backendThis.enableTracing) {
+ console.log(`index query returned ${numResults} results`);
+ console.log(`result prim keys:`, primaryKeys);
+ console.log(`result index keys:`, indexKeys);
+ }
+
+ if (backendThis.trackStats) {
+ const k = `${req.objectStoreName}.${req.indexName}`;
+ backendThis.accessStats.readsPerIndex[k] =
+ (backendThis.accessStats.readsPerIndex[k] ?? 0) + 1;
+ backendThis.accessStats.readItemsPerIndex[k] =
+ (backendThis.accessStats.readItemsPerIndex[k] ?? 0) + numResults;
+ }
+
+ return {
+ count: numResults,
+ indexKeys: indexKeys,
+ primaryKeys:
+ req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
+ values: req.resultLevel >= ResultLevel.Full ? values : undefined,
+ };
+ }
+
+ let currentPos = this._startIndex({
+ indexId,
+ indexUnique,
+ queryUnique,
+ forward,
+ });
+
+ if (!currentPos) {
+ return packResult();
+ }
+
+ if (this.enableTracing && currentPos) {
+ console.log(`starting iteration at:`);
+ console.log(`indexKey:`, deserializeKey(currentPos.indexPos));
+ console.log(`objectKey:`, deserializeKey(currentPos.objectPos));
+ }
+
+ if (req.advanceIndexKey) {
+ const advanceIndexKey = serializeKey(req.advanceIndexKey);
+ const advancePrimaryKey = req.advancePrimaryKey
+ ? serializeKey(req.advancePrimaryKey)
+ : undefined;
+ currentPos = this._continueIndex({
+ indexId,
+ indexUnique,
+ queryUnique,
+ inclusive: true,
+ currentPos,
+ forward,
+ targetIndexKey: advanceIndexKey,
+ targetObjectKey: advancePrimaryKey,
+ });
+ if (!currentPos) {
+ return packResult();
+ }
+ }
+
+ if (req.lastIndexPosition) {
+ if (this.enableTracing) {
+ console.log("index query: seeking past last index position");
+ console.log("lastObjectPosition", req.lastObjectStorePosition);
+ console.log("lastIndexPosition", req.lastIndexPosition);
+ }
+ const lastIndexPosition = serializeKey(req.lastIndexPosition);
+ const lastObjectPosition = req.lastObjectStorePosition
+ ? serializeKey(req.lastObjectStorePosition)
+ : undefined;
+ currentPos = this._continueIndex({
+ indexId,
+ indexUnique,
+ queryUnique,
+ inclusive: false,
+ currentPos,
+ forward,
+ targetIndexKey: lastIndexPosition,
+ targetObjectKey: lastObjectPosition,
+ });
+ if (!currentPos) {
+ return packResult();
+ }
+ }
+
+ if (this.enableTracing && currentPos) {
+ console.log(
+ "before range, current index pos",
+ deserializeKey(currentPos.indexPos),
+ );
+ console.log(
+ "... current object pos",
+ deserializeKey(currentPos.objectPos),
+ );
+ }
+
+ if (req.range != null) {
+ const targetKeyObj = forward ? req.range.lower : req.range.upper;
+ if (targetKeyObj != null) {
+ const targetKey = serializeKey(targetKeyObj);
+ const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen;
+ currentPos = this._continueIndex({
+ indexId,
+ indexUnique,
+ queryUnique,
+ inclusive,
+ currentPos,
+ forward,
+ targetIndexKey: targetKey,
+ });
+ }
+ if (!currentPos) {
+ return packResult();
+ }
+ }
+
+ if (this.enableTracing && currentPos) {
+ console.log(
+ "after range, current pos",
+ deserializeKey(currentPos.indexPos),
+ );
+ console.log(
+ "after range, current obj pos",
+ deserializeKey(currentPos.objectPos),
+ );
+ }
+
+ while (1) {
+ if (req.limit != 0 && numResults == req.limit) {
+ break;
+ }
+ if (currentPos == null) {
+ break;
+ }
+ if (
+ endRange &&
+ isOutsideBoundary(forward, endRange, currentPos.indexPos)
+ ) {
+ break;
+ }
+
+ numResults++;
+
+ if (req.resultLevel > ResultLevel.OnlyCount) {
+ encPrimaryKeys.push(currentPos.objectPos);
+ encIndexKeys.push(currentPos.indexPos);
+ }
+
+ currentPos = backendThis._continueIndex({
+ indexId,
+ indexUnique,
+ forward,
+ inclusive: false,
+ currentPos: undefined,
+ queryUnique,
+ targetIndexKey: currentPos.indexPos,
+ targetObjectKey: currentPos.objectPos,
+ });
+ }
+
+ return packResult();
+ }
+
+ // Continue past targetIndexKey (and optionally targetObjectKey)
+ // in the direction specified by "forward".
+ // Do nothing if the current position is already past the
+ // target position.
+ _continueIndex(req: {
+ indexId: SqliteRowid;
+ indexUnique: boolean;
+ queryUnique: boolean;
+ forward: boolean;
+ inclusive: boolean;
+ currentPos: IndexIterPos | null | undefined;
+ targetIndexKey: Uint8Array;
+ targetObjectKey?: Uint8Array;
+ }): IndexIterPos | undefined {
+ const currentPos = req.currentPos;
+ const forward = req.forward;
+ const dir = forward ? 1 : -1;
+ if (currentPos) {
+ // Check that the target position after the current position.
+ // If not, we just stay at the current position.
+ const indexCmp = compareSerializedKeys(
+ currentPos.indexPos,
+ req.targetIndexKey,
+ );
+ if (dir * indexCmp > 0) {
+ return currentPos;
+ }
+ if (indexCmp === 0) {
+ if (req.targetObjectKey != null) {
+ const objectCmp = compareSerializedKeys(
+ currentPos.objectPos,
+ req.targetObjectKey,
+ );
+ if (req.inclusive && objectCmp === 0) {
+ return currentPos;
+ }
+ if (dir * objectCmp > 0) {
+ return currentPos;
+ }
+ } else if (req.inclusive) {
+ return currentPos;
+ }
+ }
+ }
+
+ let stmt: Sqlite3Statement;
+
+ if (req.indexUnique) {
+ if (req.forward) {
+ if (req.inclusive) {
+ stmt = this._prep(sqlUniqueIndexDataContinueForwardInclusive);
+ } else {
+ stmt = this._prep(sqlUniqueIndexDataContinueForwardStrict);
+ }
+ } else {
+ if (req.inclusive) {
+ stmt = this._prep(sqlUniqueIndexDataContinueBackwardInclusive);
+ } else {
+ stmt = this._prep(sqlUniqueIndexDataContinueBackwardStrict);
+ }
+ }
+ } else {
+ if (req.forward) {
+ if (req.queryUnique || req.targetObjectKey == null) {
+ if (req.inclusive) {
+ stmt = this._prep(sqlIndexDataContinueForwardInclusiveUnique);
+ } else {
+ stmt = this._prep(sqlIndexDataContinueForwardStrictUnique);
+ }
+ } else {
+ if (req.inclusive) {
+ stmt = this._prep(sqlIndexDataContinueForwardInclusive);
+ } else {
+ stmt = this._prep(sqlIndexDataContinueForwardStrict);
+ }
+ }
+ } else {
+ if (req.queryUnique || req.targetObjectKey == null) {
+ if (req.inclusive) {
+ stmt = this._prep(sqlIndexDataContinueBackwardInclusiveUnique);
+ } else {
+ stmt = this._prep(sqlIndexDataContinueBackwardStrictUnique);
+ }
+ } else {
+ if (req.inclusive) {
+ stmt = this._prep(sqlIndexDataContinueBackwardInclusive);
+ } else {
+ stmt = this._prep(sqlIndexDataContinueBackwardStrict);
+ }
+ }
+ }
+ }
+
+ const res = stmt.getFirst({
+ index_id: req.indexId,
+ index_key: req.targetIndexKey,
+ object_key: req.targetObjectKey,
+ });
+
+ if (res == null) {
+ return undefined;
+ }
+
+ assertDbInvariant(typeof res === "object");
+ assertDbInvariant("index_key" in res);
+ const indexKey = res.index_key;
+ if (indexKey == null) {
+ return undefined;
+ }
+ assertDbInvariant(indexKey instanceof Uint8Array);
+ assertDbInvariant("object_key" in res);
+ const objectKey = res.object_key;
+ if (objectKey == null) {
+ return undefined;
+ }
+ assertDbInvariant(objectKey instanceof Uint8Array);
+
+ return {
+ indexPos: indexKey,
+ objectPos: objectKey,
+ };
+ }
+
+ _startIndex(req: {
+ indexId: SqliteRowid;
+ indexUnique: boolean;
+ queryUnique: boolean;
+ forward: boolean;
+ }): IndexIterPos | undefined {
+ let stmt: Sqlite3Statement;
+ if (req.indexUnique) {
+ if (req.forward) {
+ stmt = this._prep(sqlUniqueIndexDataStartForward);
+ } else {
+ stmt = this._prep(sqlUniqueIndexDataStartBackward);
+ }
+ } else {
+ if (req.forward) {
+ stmt = this._prep(sqlIndexDataStartForward);
+ } else {
+ if (req.queryUnique) {
+ stmt = this._prep(sqlIndexDataStartBackwardUnique);
+ } else {
+ stmt = this._prep(sqlIndexDataStartBackward);
+ }
+ }
+ }
+
+ const res = stmt.getFirst({
+ index_id: req.indexId,
+ });
+
+ if (res == null) {
+ return undefined;
+ }
+
+ assertDbInvariant(typeof res === "object");
+ assertDbInvariant("index_key" in res);
+ const indexKey = res.index_key;
+ assertDbInvariant(indexKey instanceof Uint8Array);
+ assertDbInvariant("object_key" in res);
+ const objectKey = res.object_key;
+ assertDbInvariant(objectKey instanceof Uint8Array);
+
+ return {
+ indexPos: indexKey,
+ objectPos: objectKey,
+ };
+ }
+
+ async getObjectStoreRecords(
+ btx: DatabaseTransaction,
+ req: ObjectStoreGetQuery,
+ ): Promise<RecordGetResponse> {
+ const txInfo = this.transactionMap.get(btx.transactionCookie);
+ if (!txInfo) {
+ throw Error("transaction not found");
+ }
+ const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+ if (!connInfo) {
+ throw Error("connection not found");
+ }
+ if (this.txLevel < TransactionLevel.Read) {
+ throw Error("only allowed in read transaction");
+ }
+ const scopeInfo = this.txScope.get(req.objectStoreName);
+ if (!scopeInfo) {
+ throw Error(
+ `object store ${JSON.stringify(
+ req.objectStoreName,
+ )} not in transaction scope`,
+ );
+ }
+
+ const forward: boolean =
+ req.direction === "next" || req.direction === "nextunique";
+
+ let currentKey = this._startObjectKey(scopeInfo.objectStoreId, forward);
+
+ if (req.advancePrimaryKey != null) {
+ const targetKey = serializeKey(req.advancePrimaryKey);
+ currentKey = this._continueObjectKey({
+ objectStoreId: scopeInfo.objectStoreId,
+ forward,
+ inclusive: true,
+ currentKey,
+ targetKey,
+ });
+ }
+
+ if (req.lastObjectStorePosition != null) {
+ const targetKey = serializeKey(req.lastObjectStorePosition);
+ currentKey = this._continueObjectKey({
+ objectStoreId: scopeInfo.objectStoreId,
+ forward,
+ inclusive: false,
+ currentKey,
+ targetKey,
+ });
+ }
+
+ if (req.range != null) {
+ const targetKeyObj = forward ? req.range.lower : req.range.upper;
+ if (targetKeyObj != null) {
+ const targetKey = serializeKey(targetKeyObj);
+ const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen;
+ currentKey = this._continueObjectKey({
+ objectStoreId: scopeInfo.objectStoreId,
+ forward,
+ inclusive,
+ currentKey,
+ targetKey,
+ });
+ }
+ }
+
+ const endRange = getRangeEndBoundary(forward, req.range);
+
+ let numResults = 0;
+ const encPrimaryKeys: Uint8Array[] = [];
+ const primaryKeys: IDBValidKey[] = [];
+ const values: unknown[] = [];
+
+ while (1) {
+ if (req.limit != 0 && numResults == req.limit) {
+ break;
+ }
+ if (currentKey == null) {
+ break;
+ }
+ if (endRange && isOutsideBoundary(forward, endRange, currentKey)) {
+ break;
+ }
+
+ numResults++;
+
+ if (req.resultLevel > ResultLevel.OnlyCount) {
+ encPrimaryKeys.push(currentKey);
+ }
+
+ currentKey = this._continueObjectKey({
+ objectStoreId: scopeInfo.objectStoreId,
+ forward,
+ inclusive: false,
+ currentKey: null,
+ targetKey: currentKey,
+ });
+ }
+
+ if (req.resultLevel > ResultLevel.OnlyCount) {
+ for (let i = 0; i < encPrimaryKeys.length; i++) {
+ primaryKeys.push(deserializeKey(encPrimaryKeys[i]));
+ }
+ if (req.resultLevel === ResultLevel.Full) {
+ for (let i = 0; i < encPrimaryKeys.length; i++) {
+ const val = this._getObjectValue(
+ scopeInfo.objectStoreId,
+ encPrimaryKeys[i],
+ );
+ if (!val) {
+ throw Error("invariant failed: value not found");
+ }
+ values.push(structuredRevive(JSON.parse(val)));
+ }
+ }
+ }
+
+ if (this.trackStats) {
+ const k = `${req.objectStoreName}`;
+ this.accessStats.readsPerStore[k] =
+ (this.accessStats.readsPerStore[k] ?? 0) + 1;
+ this.accessStats.readItemsPerStore[k] =
+ (this.accessStats.readItemsPerStore[k] ?? 0) + numResults;
+ }
+
+ return {
+ count: numResults,
+ indexKeys: undefined,
+ primaryKeys:
+ req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
+ values: req.resultLevel >= ResultLevel.Full ? values : undefined,
+ };
+ }
+
+ _startObjectKey(
+ objectStoreId: number | bigint,
+ forward: boolean,
+ ): Uint8Array | null {
+ let stmt: Sqlite3Statement;
+ if (forward) {
+ stmt = this._prep(sqlObjectDataStartForward);
+ } else {
+ stmt = this._prep(sqlObjectDataStartBackward);
+ }
+ const res = stmt.getFirst({
+ object_store_id: objectStoreId,
+ });
+ if (!res) {
+ return null;
+ }
+ assertDbInvariant(typeof res === "object");
+ assertDbInvariant("rkey" in res);
+ const rkey = res.rkey;
+ if (!rkey) {
+ return null;
+ }
+ assertDbInvariant(rkey instanceof Uint8Array);
+ return rkey;
+ }
+
+ // Result *must* be past targetKey in the direction
+ // specified by "forward".
+ _continueObjectKey(req: {
+ objectStoreId: number | bigint;
+ forward: boolean;
+ currentKey: Uint8Array | null;
+ targetKey: Uint8Array;
+ inclusive: boolean;
+ }): Uint8Array | null {
+ const { forward, currentKey, targetKey } = req;
+ const dir = forward ? 1 : -1;
+ if (currentKey) {
+ const objCmp = compareSerializedKeys(currentKey, targetKey);
+ if (objCmp === 0 && req.inclusive) {
+ return currentKey;
+ }
+ if (dir * objCmp > 0) {
+ return currentKey;
+ }
+ }
+
+ let stmt: Sqlite3Statement;
+
+ if (req.inclusive) {
+ if (req.forward) {
+ stmt = this._prep(sqlObjectDataContinueForwardInclusive);
+ } else {
+ stmt = this._prep(sqlObjectDataContinueBackwardInclusive);
+ }
+ } else {
+ if (req.forward) {
+ stmt = this._prep(sqlObjectDataContinueForward);
+ } else {
+ stmt = this._prep(sqlObjectDataContinueBackward);
+ }
+ }
+
+ const res = stmt.getFirst({
+ object_store_id: req.objectStoreId,
+ x: req.targetKey,
+ });
+
+ if (!res) {
+ return null;
+ }
+
+ assertDbInvariant(typeof res === "object");
+ assertDbInvariant("rkey" in res);
+ const rkey = res.rkey;
+ if (!rkey) {
+ return null;
+ }
+ assertDbInvariant(rkey instanceof Uint8Array);
+ return rkey;
+ }
+
+ _getObjectValue(
+ objectStoreId: number | bigint,
+ key: Uint8Array,
+ ): string | undefined {
+ const stmt = this._prep(sqlObjectDataValueFromKey);
+ const res = stmt.getFirst({
+ object_store_id: objectStoreId,
+ key: key,
+ });
+ if (!res) {
+ return undefined;
+ }
+ assertDbInvariant(typeof res === "object");
+ assertDbInvariant("value" in res);
+ assertDbInvariant(typeof res.value === "string");
+ return res.value;
+ }
+
+ getObjectStoreMeta(
+ dbConn: DatabaseConnection,
+ objectStoreName: string,
+ ): ObjectStoreMeta | undefined {
+ // FIXME: Use cached info from the connection for this!
+ const connInfo = this.connectionMap.get(dbConn.connectionCookie);
+ if (!connInfo) {
+ throw Error("connection not found");
+ }
+ const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+ name: objectStoreName,
+ database_name: connInfo.databaseName,
+ });
+ if (!objRes) {
+ throw Error("object store not found");
+ }
+ const objectStoreId = expectDbNumber(objRes, "id");
+ const keyPath = deserializeKeyPath(
+ expectDbStringOrNull(objRes, "key_path"),
+ );
+ const autoInc = expectDbNumber(objRes, "auto_increment");
+ const indexSet: string[] = [];
+ const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
+ object_store_id: objectStoreId,
+ });
+ for (const idxInfo of indexRes) {
+ const indexName = expectDbString(idxInfo, "name");
+ indexSet.push(indexName);
+ }
+ return {
+ keyPath,
+ autoIncrement: autoInc != 0,
+ indexSet,
+ };
+ }
+
+ getIndexMeta(
+ dbConn: DatabaseConnection,
+ objectStoreName: string,
+ indexName: string,
+ ): IndexMeta | undefined {
+ // FIXME: Use cached info from the connection for this!
+ const connInfo = this.connectionMap.get(dbConn.connectionCookie);
+ if (!connInfo) {
+ throw Error("connection not found");
+ }
+ const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+ name: objectStoreName,
+ database_name: connInfo.databaseName,
+ });
+ if (!objRes) {
+ throw Error("object store not found");
+ }
+ const objectStoreId = expectDbNumber(objRes, "id");
+ const idxInfo = this._prep(sqlGetIndexByName).getFirst({
+ object_store_id: objectStoreId,
+ name: indexName,
+ });
+ const indexUnique = expectDbNumber(idxInfo, "unique_index");
+ const indexMultiEntry = expectDbNumber(idxInfo, "multientry");
+ const indexKeyPath = deserializeKeyPath(
+ expectDbString(idxInfo, "key_path"),
+ );
+ if (!indexKeyPath) {
+ throw Error("db inconsistent");
+ }
+ return {
+ keyPath: indexKeyPath,
+ multiEntry: indexMultiEntry != 0,
+ unique: indexUnique != 0,
+ };
+ }
+
+ async getDatabases(): Promise<BridgeIDBDatabaseInfo[]> {
+ const dbList = this._prep(sqlListDatabases).getAll();
+ let res: BridgeIDBDatabaseInfo[] = [];
+ for (const r of dbList) {
+ res.push({
+ name: (r as any).name,
+ version: (r as any).version,
+ });
+ }
+
+ return res;
+ }
+
+  private _loadObjectStoreNames(databaseName: string): string[] {
+    // Fetch the names of all object stores belonging to a database.
+    const rows = this._prep(sqlGetObjectStoresByDatabase).getAll({
+      database_name: databaseName,
+    });
+    return rows.map((row) => {
+      assertDbInvariant(row != null && typeof row === "object");
+      assertDbInvariant("name" in row);
+      const name = row.name;
+      assertDbInvariant(typeof name === "string");
+      return name;
+    });
+  }
+
+  async connectDatabase(databaseName: string): Promise<ConnectResult> {
+    // Open a logical connection to a database, creating the database
+    // row on first use.  Returns the reported version and the list of
+    // existing object stores.
+    const connectionId = this.connectionIdCounter++;
+    const connectionCookie = `connection-${connectionId}`;
+
+    // Wait until no transaction is active anymore.
+    // NOTE(review): this backend serializes all transactions globally,
+    // so connecting also waits for the current transaction to finish.
+    while (1) {
+      if (this.txLevel == TransactionLevel.None) {
+        break;
+      }
+      await this.transactionDoneCond.wait();
+    }
+
+    // Short sqlite transaction just to read / initialize the metadata.
+    this._prep(sqlBegin).run();
+    const versionRes = this._prep(sqlGetDatabaseVersion).getFirst({
+      name: databaseName,
+    });
+    let ver: number;
+    if (versionRes == undefined) {
+      // NOTE(review): the row is created with version 1 (see
+      // sqlCreateDatabase) while 0 is reported here — presumably so
+      // the caller triggers an upgrade transaction; confirm.
+      this._prep(sqlCreateDatabase).run({ name: databaseName });
+      ver = 0;
+    } else {
+      const verNum = expectDbNumber(versionRes, "version");
+      assertDbInvariant(typeof verNum === "number");
+      ver = verNum;
+    }
+    const objectStoreNames: string[] = this._loadObjectStoreNames(databaseName);
+
+    this._prep(sqlCommit).run();
+
+    this.connectionMap.set(connectionCookie, {
+      databaseName: databaseName,
+    });
+
+    return {
+      conn: {
+        connectionCookie,
+      },
+      version: ver,
+      objectStores: objectStoreNames,
+    };
+  }
+
+  private _loadScopeInfo(connInfo: ConnectionInfo, storeName: string): void {
+    // Load the object-store id and the metadata of all its indexes
+    // from the database and register them in the current transaction
+    // scope (this.txScope) under the store's name.
+    const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+      name: storeName,
+      database_name: connInfo.databaseName,
+    });
+    if (!objRes) {
+      throw Error("object store not found");
+    }
+    const objectStoreId = expectDbNumber(objRes, "id");
+    const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
+      object_store_id: objectStoreId,
+    });
+    if (!indexRes) {
+      throw Error("db inconsistent");
+    }
+    const indexMap = new Map<string, ScopeIndexInfo>();
+    for (const idxInfo of indexRes) {
+      const indexId = expectDbNumber(idxInfo, "id");
+      const indexName = expectDbString(idxInfo, "name");
+      const indexUnique = expectDbNumber(idxInfo, "unique_index");
+      const indexMultiEntry = expectDbNumber(idxInfo, "multientry");
+      const indexKeyPath = deserializeKeyPath(
+        expectDbString(idxInfo, "key_path"),
+      );
+      // A stored index must always have a non-empty key path.
+      if (!indexKeyPath) {
+        throw Error("db inconsistent");
+      }
+      indexMap.set(indexName, {
+        indexId,
+        keyPath: indexKeyPath,
+        multiEntry: indexMultiEntry != 0,
+        unique: indexUnique != 0,
+      });
+    }
+    this.txScope.set(storeName, {
+      objectStoreId,
+      indexMap,
+    });
+  }
+
+  async beginTransaction(
+    conn: DatabaseConnection,
+    objectStores: string[],
+    mode: IDBTransactionMode,
+  ): Promise<DatabaseTransaction> {
+    // Begin a read or readwrite transaction over the given object
+    // stores.  Transactions are globally serialized: wait until the
+    // single sqlite transaction slot is free before taking it.
+    const connInfo = this.connectionMap.get(conn.connectionCookie);
+    if (!connInfo) {
+      throw Error("connection not found");
+    }
+    const transactionCookie = `tx-${this.transactionIdCounter++}`;
+
+    while (1) {
+      if (this.txLevel === TransactionLevel.None) {
+        break;
+      }
+      await this.transactionDoneCond.wait();
+    }
+
+    if (this.trackStats) {
+      if (mode === "readonly") {
+        this.accessStats.readTransactions++;
+      } else if (mode === "readwrite") {
+        this.accessStats.writeTransactions++;
+      }
+    }
+
+    this._prep(sqlBegin).run();
+    if (mode === "readonly") {
+      this.txLevel = TransactionLevel.Read;
+    } else if (mode === "readwrite") {
+      this.txLevel = TransactionLevel.Write;
+    }
+
+    this.transactionMap.set(transactionCookie, {
+      connectionCookie: conn.connectionCookie,
+    });
+
+    // FIXME: We should check this
+    // if (this.txScope.size != 0) {
+    //   // Something didn't clean up!
+    //   throw Error("scope not empty");
+    // }
+    this.txScope.clear();
+
+    // Cache per-store and per-index metadata for the lifetime of this
+    // transaction.
+    // FIXME: Use cached info from connection?
+    for (const storeName of objectStores) {
+      this._loadScopeInfo(connInfo, storeName);
+    }
+
+    return {
+      transactionCookie,
+    };
+  }
+
+  async enterVersionChange(
+    conn: DatabaseConnection,
+    newVersion: number,
+  ): Promise<DatabaseTransaction> {
+    // Begin a versionchange transaction: bump the stored database
+    // version and load the scope info of *all* existing object stores,
+    // since a versionchange transaction may touch any of them.
+    const connInfo = this.connectionMap.get(conn.connectionCookie);
+    if (!connInfo) {
+      throw Error("connection not found");
+    }
+    if (this.enableTracing) {
+      console.log(
+        `entering version change transaction (conn ${conn.connectionCookie}), newVersion=${newVersion}`,
+      );
+    }
+    const transactionCookie = `tx-vc-${this.transactionIdCounter++}`;
+
+    // Wait for the global transaction slot, same as beginTransaction.
+    while (1) {
+      if (this.txLevel === TransactionLevel.None) {
+        break;
+      }
+      await this.transactionDoneCond.wait();
+    }
+
+    // FIXME: We should check this
+    // if (this.txScope.size != 0) {
+    //   // Something didn't clean up!
+    //   throw Error("scope not empty");
+    // }
+    this.txScope.clear();
+
+    if (this.enableTracing) {
+      console.log(`version change transaction unblocked`);
+    }
+
+    this._prep(sqlBegin).run();
+    this.txLevel = TransactionLevel.VersionChange;
+
+    this.transactionMap.set(transactionCookie, {
+      connectionCookie: conn.connectionCookie,
+    });
+
+    // The version is updated inside the transaction; rolling back the
+    // upgrade also rolls back the version bump.
+    this._prep(sqlUpdateDbVersion).run({
+      name: connInfo.databaseName,
+      version: newVersion,
+    });
+
+    const objectStoreNames = this._loadObjectStoreNames(connInfo.databaseName);
+
+    // FIXME: Use cached info from connection?
+    for (const storeName of objectStoreNames) {
+      this._loadScopeInfo(connInfo, storeName);
+    }
+
+    return {
+      transactionCookie,
+    };
+  }
+
+  async deleteDatabase(databaseName: string): Promise<void> {
+    // Delete a database together with all of its object stores,
+    // indexes, index data and records.
+    // FIXME: Wait until connection queue is not blocked
+    // FIXME: To properly implement the spec semantics, maybe
+    // split delete into prepareDelete and executeDelete?
+
+    while (this.txLevel !== TransactionLevel.None) {
+      await this.transactionDoneCond.wait();
+    }
+
+    this._prep(sqlBegin).run();
+    const objectStoreNames = this._loadObjectStoreNames(databaseName);
+    for (const storeName of objectStoreNames) {
+      const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({
+        name: storeName,
+        database_name: databaseName,
+      });
+      if (!objRes) {
+        throw Error("object store not found");
+      }
+      const objectStoreId = expectDbNumber(objRes, "id");
+      const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({
+        object_store_id: objectStoreId,
+      });
+      if (!indexRes) {
+        throw Error("db inconsistent");
+      }
+      // Fix: iterate over the index rows directly instead of building
+      // ScopeIndexInfo entries and storing them in this.txScope.  The
+      // previous code populated the transaction scope map outside of
+      // any transaction and never removed the entries, leaking state.
+      for (const idxInfo of indexRes) {
+        const indexId = expectDbNumber(idxInfo, "id");
+        const indexUnique = expectDbNumber(idxInfo, "unique_index");
+        // Index entries live in separate tables for unique and
+        // non-unique indexes; delete the data before the metadata.
+        let stmt: Sqlite3Statement;
+        if (indexUnique != 0) {
+          stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
+        } else {
+          stmt = this._prep(sqlIndexDataDeleteAll);
+        }
+        stmt.run({
+          index_id: indexId,
+        });
+        this._prep(sqlIndexDelete).run({
+          index_id: indexId,
+        });
+      }
+      this._prep(sqlObjectDataDeleteAll).run({
+        object_store_id: objectStoreId,
+      });
+      this._prep(sqlObjectStoreDelete).run({
+        object_store_id: objectStoreId,
+      });
+    }
+    this._prep(sqlDeleteDatabase).run({
+      name: databaseName,
+    });
+    this._prep(sqlCommit).run();
+  }
+
+  async close(db: DatabaseConnection): Promise<void> {
+    // Drop the logical connection from the connection table.
+    // FIXME: What if we're in a transaction? Does the backend interface allow this?
+    // if (this.txLevel !== TransactionLevel.None) {
+    //   throw Error("can't close while in transaction");
+    // }
+    if (!this.connectionMap.has(db.connectionCookie)) {
+      throw Error("connection not found");
+    }
+    if (this.enableTracing) {
+      console.log(`closing connection ${db.connectionCookie}`);
+    }
+    this.connectionMap.delete(db.connectionCookie);
+  }
+
+  renameObjectStore(
+    btx: DatabaseTransaction,
+    oldName: string,
+    newName: string,
+  ): void {
+    // Rename an object store, both in the cached transaction scope and
+    // in the on-disk metadata.
+    if (this.enableTracing) {
+      console.log(`renaming object store '${oldName}' to '${newName}'`);
+    }
+    const tx = this.transactionMap.get(btx.transactionCookie);
+    if (!tx) {
+      throw Error("transaction required");
+    }
+    if (!this.connectionMap.get(tx.connectionCookie)) {
+      throw Error("not connected");
+    }
+    // FIXME: Would be much nicer with numeric UID handles
+    const scope = this.txScope.get(oldName);
+    if (!scope) {
+      throw Error("object store not found");
+    }
+    this.txScope.delete(oldName);
+    this.txScope.set(newName, scope);
+    this._prep(sqlRenameObjectStore).run({
+      object_store_id: scope.objectStoreId,
+      name: newName,
+    });
+  }
+
+  renameIndex(
+    btx: DatabaseTransaction,
+    objectStoreName: string,
+    oldIndexName: string,
+    newIndexName: string,
+  ): void {
+    // Rename an index of an object store, updating both the cached
+    // transaction scope and the on-disk metadata.
+    const tx = this.transactionMap.get(btx.transactionCookie);
+    if (!tx) {
+      throw Error("transaction required");
+    }
+    if (!this.connectionMap.get(tx.connectionCookie)) {
+      throw Error("not connected");
+    }
+    // FIXME: Would be much nicer with numeric UID handles
+    const scope = this.txScope.get(objectStoreName);
+    if (!scope) {
+      throw Error("object store not found");
+    }
+    const idx = scope.indexMap.get(oldIndexName);
+    if (!idx) {
+      throw Error("index not found");
+    }
+    // FIXME: Would also be much nicer with numeric UID handles
+    scope.indexMap.delete(oldIndexName);
+    scope.indexMap.set(newIndexName, idx);
+    this._prep(sqlRenameIndex).run({
+      index_id: idx.indexId,
+      name: newIndexName,
+    });
+  }
+
+  deleteObjectStore(btx: DatabaseTransaction, name: string): void {
+    // Delete an object store: all index data, all index metadata, all
+    // records and finally the store's own metadata row.  Only valid
+    // while the store is in the current transaction scope.
+    const txInfo = this.transactionMap.get(btx.transactionCookie);
+    if (!txInfo) {
+      throw Error("transaction required");
+    }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("not connected");
+    }
+    // FIXME: Would be much nicer with numeric UID handles
+    const scopeInfo = this.txScope.get(name);
+    if (!scopeInfo) {
+      throw Error("object store not found");
+    }
+    for (const indexInfo of scopeInfo.indexMap.values()) {
+      // Unique and non-unique index entries live in separate tables.
+      let stmt: Sqlite3Statement;
+      if (indexInfo.unique) {
+        stmt = this._prep(sqlIUniqueIndexDataDeleteAll);
+      } else {
+        stmt = this._prep(sqlIndexDataDeleteAll);
+      }
+      stmt.run({
+        index_id: indexInfo.indexId,
+      });
+      // Delete index metadata after its data (FK: index_data -> indexes).
+      this._prep(sqlIndexDelete).run({
+        index_id: indexInfo.indexId,
+      });
+    }
+    this._prep(sqlObjectDataDeleteAll).run({
+      object_store_id: scopeInfo.objectStoreId,
+    });
+    this._prep(sqlObjectStoreDelete).run({
+      object_store_id: scopeInfo.objectStoreId,
+    });
+    // Remove the store from the transaction scope as well.
+    this.txScope.delete(name);
+  }
+
+  deleteIndex(
+    btx: DatabaseTransaction,
+    objectStoreName: string,
+    indexName: string,
+  ): void {
+    // Drop an index: remove it from the transaction scope, delete its
+    // data rows and finally its metadata row.
+    const tx = this.transactionMap.get(btx.transactionCookie);
+    if (!tx) {
+      throw Error("transaction required");
+    }
+    if (!this.connectionMap.get(tx.connectionCookie)) {
+      throw Error("not connected");
+    }
+    // FIXME: Would be much nicer with numeric UID handles
+    const scope = this.txScope.get(objectStoreName);
+    if (!scope) {
+      throw Error("object store not found");
+    }
+    const idx = scope.indexMap.get(indexName);
+    if (!idx) {
+      throw Error("index not found");
+    }
+    scope.indexMap.delete(indexName);
+    // Unique and non-unique index entries live in separate tables.
+    const dataStmt = idx.unique
+      ? this._prep(sqlIUniqueIndexDataDeleteAll)
+      : this._prep(sqlIndexDataDeleteAll);
+    dataStmt.run({
+      index_id: idx.indexId,
+    });
+    this._prep(sqlIndexDelete).run({
+      index_id: idx.indexId,
+    });
+  }
+
+  async rollback(btx: DatabaseTransaction): Promise<void> {
+    // Abort the current transaction, release the global transaction
+    // slot and wake up waiters.
+    const tx = this.transactionMap.get(btx.transactionCookie);
+    if (!tx) {
+      throw Error("transaction not found");
+    }
+    if (this.enableTracing) {
+      console.log(`rolling back transaction ${btx.transactionCookie}`);
+    }
+    if (this.txLevel === TransactionLevel.None) {
+      // Nothing active; rollback is a no-op.
+      return;
+    }
+    this._prep(sqlRollback).run();
+    this.txLevel = TransactionLevel.None;
+    this.transactionMap.delete(btx.transactionCookie);
+    this.txScope.clear();
+    this.transactionDoneCond.trigger();
+  }
+
+  async commit(btx: DatabaseTransaction): Promise<void> {
+    // Commit the current transaction, release the global transaction
+    // slot and wake up waiters.
+    const tx = this.transactionMap.get(btx.transactionCookie);
+    if (!tx) {
+      throw Error("transaction not found");
+    }
+    if (this.enableTracing) {
+      console.log(`committing transaction ${btx.transactionCookie}`);
+    }
+    if (this.txLevel === TransactionLevel.None) {
+      // Nothing active; commit is a no-op.
+      return;
+    }
+    this._prep(sqlCommit).run();
+    this.txLevel = TransactionLevel.None;
+    this.txScope.clear();
+    this.transactionMap.delete(btx.transactionCookie);
+    this.transactionDoneCond.trigger();
+  }
+
+  createObjectStore(
+    btx: DatabaseTransaction,
+    name: string,
+    keyPath: string | string[] | null,
+    autoIncrement: boolean,
+  ): void {
+    // Create a new object store.  Only allowed while a versionchange
+    // transaction is active; the new store is immediately added to the
+    // transaction scope.
+    const txInfo = this.transactionMap.get(btx.transactionCookie);
+    if (!txInfo) {
+      throw Error("transaction not found");
+    }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("connection not found");
+    }
+    if (this.txLevel < TransactionLevel.VersionChange) {
+      throw Error("only allowed in versionchange transaction");
+    }
+    if (this.txScope.has(name)) {
+      throw Error("object store already exists");
+    }
+    // Key paths are stored in serialized (string) form.
+    let myKeyPath = serializeKeyPath(keyPath);
+    const runRes = this._prep(sqlCreateObjectStore).run({
+      name,
+      key_path: myKeyPath,
+      auto_increment: autoIncrement ? 1 : 0,
+      database_name: connInfo.databaseName,
+    });
+    // The sqlite rowid of the inserted row is the object store id.
+    this.txScope.set(name, {
+      objectStoreId: runRes.lastInsertRowid,
+      indexMap: new Map(),
+    });
+  }
+
+  createIndex(
+    btx: DatabaseTransaction,
+    indexName: string,
+    objectStoreName: string,
+    keyPath: string | string[],
+    multiEntry: boolean,
+    unique: boolean,
+  ): void {
+    // Create a new index on an object store and back-fill it with the
+    // store's existing records.  Only allowed in a versionchange
+    // transaction.
+    const txInfo = this.transactionMap.get(btx.transactionCookie);
+    if (!txInfo) {
+      throw Error("transaction not found");
+    }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("connection not found");
+    }
+    if (this.txLevel < TransactionLevel.VersionChange) {
+      throw Error("only allowed in versionchange transaction");
+    }
+    const scopeInfo = this.txScope.get(objectStoreName);
+    if (!scopeInfo) {
+      throw Error("object store does not exist, can't create index");
+    }
+    if (scopeInfo.indexMap.has(indexName)) {
+      throw Error("index already exists");
+    }
+
+    if (this.enableTracing) {
+      console.log(`creating index "${indexName}"`);
+    }
+
+    const res = this._prep(sqlCreateIndex).run({
+      object_store_id: scopeInfo.objectStoreId,
+      name: indexName,
+      key_path: serializeKeyPath(keyPath),
+      unique: unique ? 1 : 0,
+      multientry: multiEntry ? 1 : 0,
+    });
+    // The rowid of the inserted metadata row is the index id.
+    const scopeIndexInfo: ScopeIndexInfo = {
+      indexId: res.lastInsertRowid,
+      keyPath,
+      multiEntry,
+      unique,
+    };
+    scopeInfo.indexMap.set(indexName, scopeIndexInfo);
+
+    // Back-fill the index from all existing records of the store.
+    // FIXME: We can't use an iterator here, as it's not allowed to
+    // execute a write statement while the iterator executes.
+    // Maybe do multiple selects instead of loading everything into memory?
+    const keyRowsRes = this._prep(sqlObjectDataGetAll).getAll({
+      object_store_id: scopeInfo.objectStoreId,
+    });
+
+    for (const keyRow of keyRowsRes) {
+      assertDbInvariant(typeof keyRow === "object" && keyRow != null);
+      assertDbInvariant("key" in keyRow);
+      assertDbInvariant("value" in keyRow);
+      assertDbInvariant(typeof keyRow.value === "string");
+      const key = keyRow.key;
+      // Values are stored as JSON-encoded structured clones.
+      const value = structuredRevive(JSON.parse(keyRow.value));
+      assertDbInvariant(key instanceof Uint8Array);
+      try {
+        this.insertIntoIndex(scopeIndexInfo, key, value);
+      } catch (e) {
+        // FIXME: Catch this in insertIntoIndex!
+        if (e instanceof DataError) {
+          // Per spec, records that don't yield a valid index key are
+          // simply skipped during back-fill.
+          // https://www.w3.org/TR/IndexedDB-2/#object-store-storage-operation
+          // Do nothing
+        } else {
+          throw e;
+        }
+      }
+    }
+  }
+
+  async deleteRecord(
+    btx: DatabaseTransaction,
+    objectStoreName: string,
+    range: BridgeIDBKeyRange,
+  ): Promise<void> {
+    // Delete all records of an object store whose key falls into the
+    // given range, including their index entries.
+    const txInfo = this.transactionMap.get(btx.transactionCookie);
+    if (!txInfo) {
+      throw Error("transaction not found");
+    }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("connection not found");
+    }
+    if (this.txLevel < TransactionLevel.Write) {
+      throw Error("store operation only allowed while running a transaction");
+    }
+    const scopeInfo = this.txScope.get(objectStoreName);
+    if (!scopeInfo) {
+      throw Error(
+        `object store ${JSON.stringify(
+          objectStoreName,
+        )} not in transaction scope`,
+      );
+    }
+
+    // PERF: We delete keys one-by-one here.
+    // Instead, we could do it with a single
+    // delete query for the object data / index data.
+
+    let currKey: Uint8Array | null = null;
+
+    if (range.lower != null) {
+      const targetKey = serializeKey(range.lower);
+      // Fix: honor lowerOpen.  Previously the lower bound was always
+      // treated as inclusive, so an open lower bound incorrectly
+      // deleted the record at the boundary key as well.
+      currKey = this._continueObjectKey({
+        objectStoreId: scopeInfo.objectStoreId,
+        currentKey: null,
+        forward: true,
+        inclusive: !range.lowerOpen,
+        targetKey,
+      });
+    } else {
+      currKey = this._startObjectKey(scopeInfo.objectStoreId, true);
+    }
+
+    let upperBound: Uint8Array | undefined;
+    if (range.upper != null) {
+      upperBound = serializeKey(range.upper);
+    }
+
+    // loop invariant: (currKey is undefined) or (currKey is a valid key)
+    while (true) {
+      if (!currKey) {
+        break;
+      }
+
+      // Stop once we are past the upper end of the range.
+      if (upperBound != null) {
+        const cmp = compareSerializedKeys(currKey, upperBound);
+        if (cmp > 0) {
+          break;
+        }
+        if (cmp == 0 && range.upperOpen) {
+          break;
+        }
+      }
+
+      // Delete the record and every index entry pointing at it.
+      this._prep(sqlObjectDataDeleteKey).run({
+        object_store_id: scopeInfo.objectStoreId,
+        key: currKey,
+      });
+
+      for (const index of scopeInfo.indexMap.values()) {
+        let stmt: Sqlite3Statement;
+        if (index.unique) {
+          stmt = this._prep(sqlUniqueIndexDataDeleteKey);
+        } else {
+          stmt = this._prep(sqlIndexDataDeleteKey);
+        }
+        stmt.run({
+          index_id: index.indexId,
+          object_key: currKey,
+        });
+      }
+
+      // Advance to the next key strictly after the one just deleted.
+      currKey = this._continueObjectKey({
+        objectStoreId: scopeInfo.objectStoreId,
+        currentKey: null,
+        forward: true,
+        inclusive: false,
+        targetKey: currKey,
+      });
+    }
+  }
+
+  async storeRecord(
+    btx: DatabaseTransaction,
+    storeReq: RecordStoreRequest,
+  ): Promise<RecordStoreResponse> {
+    // Store one record, honoring the requested store level
+    // (NoOverwrite / AllowOverwrite / UpdateExisting), the store's key
+    // path and its key generator.  Returns the effective key.
+    const txInfo = this.transactionMap.get(btx.transactionCookie);
+    if (!txInfo) {
+      throw Error("transaction not found");
+    }
+    const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+    if (!connInfo) {
+      throw Error("connection not found");
+    }
+    if (this.txLevel < TransactionLevel.Write) {
+      throw Error("store operation only allowed while running a transaction");
+    }
+    const scopeInfo = this.txScope.get(storeReq.objectStoreName);
+    if (!scopeInfo) {
+      throw Error(
+        `object store ${JSON.stringify(
+          storeReq.objectStoreName,
+        )} not in transaction scope`,
+      );
+    }
+    // Fetch key path and key generator state fresh from the database.
+    const metaRes = this._prep(sqlGetObjectStoreMetaById).getFirst({
+      id: scopeInfo.objectStoreId,
+    });
+    if (metaRes === undefined) {
+      throw Error(
+        `object store ${JSON.stringify(
+          storeReq.objectStoreName,
+        )} does not exist`,
+      );
+    }
+    assertDbInvariant(!!metaRes && typeof metaRes === "object");
+    assertDbInvariant("key_path" in metaRes);
+    assertDbInvariant("auto_increment" in metaRes);
+    const dbKeyPath = metaRes.key_path;
+    assertDbInvariant(dbKeyPath === null || typeof dbKeyPath === "string");
+    const keyPath = deserializeKeyPath(dbKeyPath);
+    // NOTE: auto_increment doubles as the key generator state; 0 means
+    // "no key generator" (see makeStoreKeyValue usage below).
+    const autoIncrement = metaRes.auto_increment;
+    assertDbInvariant(typeof autoIncrement === "number");
+
+    let key;
+    let value;
+    let updatedKeyGenerator: number | undefined;
+
+    if (storeReq.storeLevel === StoreLevel.UpdateExisting) {
+      // Cursor update: the caller supplies the exact key, the value is
+      // stored as-is.
+      if (storeReq.key == null) {
+        throw Error("invalid update request (key not given)");
+      }
+      key = storeReq.key;
+      value = storeReq.value;
+    } else {
+      if (keyPath != null && storeReq.key !== undefined) {
+        // If in-line keys are used, a key can't be explicitly specified.
+        throw new DataError();
+      }
+
+      // Derive the effective key (explicit, from key path, or from the
+      // key generator) and possibly inject it into the value.
+      const storeKeyResult = makeStoreKeyValue({
+        value: storeReq.value,
+        key: storeReq.key,
+        currentKeyGenerator: autoIncrement,
+        autoIncrement: autoIncrement != 0,
+        keyPath: keyPath,
+      });
+
+      if (autoIncrement != 0) {
+        updatedKeyGenerator = storeKeyResult.updatedKeyGenerator;
+      }
+
+      key = storeKeyResult.key;
+      value = storeKeyResult.value;
+    }
+
+    const serializedObjectKey = serializeKey(key);
+
+    const existingObj = this._getObjectValue(
+      scopeInfo.objectStoreId,
+      serializedObjectKey,
+    );
+
+    if (storeReq.storeLevel === StoreLevel.NoOverwrite) {
+      // IDBObjectStore.add(): refuse to replace an existing record.
+      if (existingObj) {
+        throw new ConstraintError();
+      }
+    }
+
+    // INSERT OR REPLACE, so overwrites don't need a separate statement.
+    this._prep(sqlInsertObjectData).run({
+      object_store_id: scopeInfo.objectStoreId,
+      key: serializedObjectKey,
+      value: JSON.stringify(structuredEncapsulate(value)),
+    });
+
+    if (autoIncrement != 0) {
+      this._prep(sqlUpdateAutoIncrement).run({
+        object_store_id: scopeInfo.objectStoreId,
+        auto_increment: updatedKeyGenerator,
+      });
+    }
+
+    for (const [k, indexInfo] of scopeInfo.indexMap.entries()) {
+      // On overwrite, stale index entries must be removed before the
+      // new ones are inserted.
+      if (existingObj) {
+        this.deleteFromIndex(
+          indexInfo.indexId,
+          indexInfo.unique,
+          serializedObjectKey,
+        );
+      }
+
+      try {
+        this.insertIntoIndex(indexInfo, serializedObjectKey, value);
+      } catch (e) {
+        // FIXME: handle this in insertIntoIndex!
+        if (e instanceof DataError) {
+          // Record doesn't yield a valid index key for this index.
+          // We don't propagate this error here.
+          continue;
+        }
+        throw e;
+      }
+    }
+
+    if (this.trackStats) {
+      this.accessStats.writesPerStore[storeReq.objectStoreName] =
+        (this.accessStats.writesPerStore[storeReq.objectStoreName] ?? 0) + 1;
+    }
+
+    return {
+      key: key,
+    };
+  }
+
+  private deleteFromIndex(
+    indexId: SqliteRowid,
+    indexUnique: boolean,
+    objectKey: Uint8Array,
+  ): void {
+    // Remove all entries of one index that point at the given object
+    // key.  Unique and non-unique entries live in separate tables.
+    const stmt = indexUnique
+      ? this._prep(sqlUniqueIndexDataDeleteKey)
+      : this._prep(sqlIndexDataDeleteKey);
+    stmt.run({
+      index_id: indexId,
+      object_key: objectKey,
+    });
+  }
+
+  private insertIntoIndex(
+    indexInfo: ScopeIndexInfo,
+    primaryKey: Uint8Array,
+    value: any,
+  ): void {
+    // Compute the index key(s) for a stored value and insert the
+    // corresponding index entries.  A multi-entry index can yield
+    // several keys for one record.
+    const indexKeys = getIndexKeys(
+      value,
+      indexInfo.keyPath,
+      indexInfo.multiEntry,
+    );
+    if (!indexKeys.length) {
+      // Value yields no index key; the record is simply not indexed.
+      return;
+    }
+
+    let stmt;
+    if (indexInfo.unique) {
+      stmt = this._prep(sqlInsertUniqueIndexData);
+    } else {
+      stmt = this._prep(sqlInsertIndexData);
+    }
+
+    for (const indexKey of indexKeys) {
+      // FIXME: Re-throw correct error for unique index violations
+      const serializedIndexKey = serializeKey(indexKey);
+      try {
+        stmt.run({
+          index_id: indexInfo.indexId,
+          object_key: primaryKey,
+          index_key: serializedIndexKey,
+        });
+      } catch (e: any) {
+        // A primary key conflict on the (unique_)index_data table means
+        // a uniqueness violation; map it to the IndexedDB error type.
+        if (e.code === SqliteError.constraintPrimarykey) {
+          throw new ConstraintError();
+        }
+        throw e;
+      }
+    }
+  }
+
+ clearObjectStore(
+ btx: DatabaseTransaction,
+ objectStoreName: string,
+ ): Promise<void> {
+ const txInfo = this.transactionMap.get(btx.transactionCookie);
+ if (!txInfo) {
+ throw Error("transaction not found");
+ }
+ const connInfo = this.connectionMap.get(txInfo.connectionCookie);
+ if (!connInfo) {
+ throw Error("connection not found");
+ }
+ if (this.txLevel < TransactionLevel.Write) {
+ throw Error("store operation only allowed while running a transaction");
+ }
+ const scopeInfo = this.txScope.get(objectStoreName);
+ if (!scopeInfo) {
+ throw Error(
+ `object store ${JSON.stringify(
+ objectStoreName,
+ )} not in transaction scope`,
+ );
+ }
+
+ throw new Error("Method not implemented.");
+ }
+}
+
+// Schema for the backend's metadata and data tables:
+// - databases / object_stores / indexes hold IndexedDB metadata.
+// - object_data holds the records (key: serialized IDB key,
+//   value: JSON-encoded structured clone).
+// - index_data / unique_index_data hold index entries; unique indexes
+//   get their own table so that the PRIMARY KEY (index_id, index_key)
+//   enforces uniqueness.
+// NOTE(review): object_stores.database_name has no declared column
+// type (BLOB affinity in sqlite); presumably intended as TEXT — confirm.
+const schemaSql = `
+CREATE TABLE IF NOT EXISTS databases
+( name TEXT PRIMARY KEY
+, version INTEGER NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS object_stores
+( id INTEGER PRIMARY KEY
+, database_name NOT NULL
+, name TEXT NOT NULL
+, key_path TEXT
+, auto_increment INTEGER NOT NULL DEFAULT 0
+, FOREIGN KEY (database_name)
+    REFERENCES databases(name)
+);
+
+CREATE TABLE IF NOT EXISTS indexes
+( id INTEGER PRIMARY KEY
+, object_store_id INTEGER NOT NULL
+, name TEXT NOT NULL
+, key_path TEXT NOT NULL
+, unique_index INTEGER NOT NULL
+, multientry INTEGER NOT NULL
+, FOREIGN KEY (object_store_id)
+    REFERENCES object_stores(id)
+);
+
+CREATE TABLE IF NOT EXISTS object_data
+( object_store_id INTEGER NOT NULL
+, key BLOB NOT NULL
+, value TEXT NOT NULL
+, PRIMARY KEY (object_store_id, key)
+);
+
+CREATE TABLE IF NOT EXISTS index_data
+( index_id INTEGER NOT NULL
+, index_key BLOB NOT NULL
+, object_key BLOB NOT NULL
+, PRIMARY KEY (index_id, index_key, object_key)
+, FOREIGN KEY (index_id)
+    REFERENCES indexes(id)
+);
+
+CREATE TABLE IF NOT EXISTS unique_index_data
+( index_id INTEGER NOT NULL
+, index_key BLOB NOT NULL
+, object_key BLOB NOT NULL
+, PRIMARY KEY (index_id, index_key)
+, FOREIGN KEY (index_id)
+    REFERENCES indexes(id)
+);
+`;
+
+// Enumerate all databases with their versions.
+const sqlListDatabases = `
+SELECT name, version FROM databases;
+`;
+
+const sqlGetDatabaseVersion = `
+SELECT version FROM databases WHERE name=$name;
+`;
+
+// Transaction control; the backend holds at most one sqlite
+// transaction at a time.
+const sqlBegin = `BEGIN;`;
+const sqlCommit = `COMMIT;`;
+const sqlRollback = `ROLLBACK;`;
+
+// Databases are created at version 1 (connectDatabase still reports 0
+// to the caller for a fresh database).
+const sqlCreateDatabase = `
+INSERT INTO databases (name, version) VALUES ($name, 1);
+`;
+
+const sqlDeleteDatabase = `
+DELETE FROM databases
+WHERE name=$name;
+`;
+
+const sqlCreateObjectStore = `
+INSERT INTO object_stores (name, database_name, key_path, auto_increment)
+  VALUES ($name, $database_name, $key_path, $auto_increment);
+`;
+
+// Deletes only the metadata row; data rows are removed separately.
+const sqlObjectStoreDelete = `
+DELETE FROM object_stores
+WHERE id=$object_store_id;`;
+
+const sqlObjectDataDeleteAll = `
+DELETE FROM object_data
+WHERE object_store_id=$object_store_id`;
+
+const sqlIndexDelete = `
+DELETE FROM indexes
+WHERE id=$index_id;
+`;
+
+const sqlIndexDataDeleteAll = `
+DELETE FROM index_data
+WHERE index_id=$index_id;
+`;
+
+// NOTE(review): the "I" in the name looks like a typo for
+// "sqlUniqueIndexDataDeleteAll"; kept because it is referenced
+// throughout the backend.
+const sqlIUniqueIndexDataDeleteAll = `
+DELETE FROM unique_index_data
+WHERE index_id=$index_id;
+`;
+
+const sqlCreateIndex = `
+INSERT INTO indexes (object_store_id, name, key_path, unique_index, multientry)
+  VALUES ($object_store_id, $name, $key_path, $unique, $multientry);
+`;
+
+const sqlInsertIndexData = `
+INSERT INTO index_data (index_id, object_key, index_key)
+  VALUES ($index_id, $object_key, $index_key);`;
+
+const sqlInsertUniqueIndexData = `
+INSERT INTO unique_index_data (index_id, object_key, index_key)
+  VALUES ($index_id, $object_key, $index_key);`;
+
+const sqlUpdateDbVersion = `
+UPDATE databases
+  SET version=$version
+  WHERE name=$name;
+`;
+
+const sqlRenameObjectStore = `
+UPDATE object_stores
+  SET name=$name
+  WHERE id=$object_store_id`;
+
+// Fix: the indexes table's primary key column is "id" (see schemaSql
+// and sqlIndexDelete); there is no "index_id" column, so the previous
+// "WHERE index_id=$index_id" could not even be prepared.
+const sqlRenameIndex = `
+UPDATE indexes
+  SET name=$name
+  WHERE id=$index_id`;
+
+const sqlGetObjectStoresByDatabase = `
+SELECT id, name, key_path, auto_increment
+FROM object_stores
+WHERE database_name=$database_name;
+`;
+
+const sqlGetObjectStoreMetaById = `
+SELECT key_path, auto_increment
+FROM object_stores
+WHERE id = $id;
+`;
+
+const sqlGetObjectStoreMetaByName = `
+SELECT id, key_path, auto_increment
+FROM object_stores
+WHERE database_name=$database_name AND name=$name;
+`;
+
+const sqlGetIndexesByObjectStoreId = `
+SELECT id, name, key_path, unique_index, multientry
+FROM indexes
+WHERE object_store_id=$object_store_id
+`;
+
+const sqlGetIndexByName = `
+SELECT id, key_path, unique_index, multientry
+FROM indexes
+WHERE object_store_id=$object_store_id
+  AND name=$name
+`;
+
+// OR REPLACE lets the same statement serve both insert and overwrite.
+const sqlInsertObjectData = `
+INSERT OR REPLACE INTO object_data(object_store_id, key, value)
+  VALUES ($object_store_id, $key, $value);
+`;
+
+// Persists the key generator state (stored in auto_increment).
+const sqlUpdateAutoIncrement = `
+UPDATE object_stores
+  SET auto_increment=$auto_increment
+  WHERE id=$object_store_id
+`;
+
+const sqlObjectDataValueFromKey = `
+SELECT value FROM object_data
+  WHERE object_store_id=$object_store_id
+    AND key=$key;
+`;
+
+const sqlObjectDataGetAll = `
+SELECT key, value FROM object_data
+  WHERE object_store_id=$object_store_id;`;
+
+// Serialized keys are BLOBs whose byte order matches IndexedDB key
+// order, so min/max and </> comparisons implement cursor movement.
+const sqlObjectDataStartForward = `
+SELECT min(key) as rkey FROM object_data
+  WHERE object_store_id=$object_store_id;`;
+
+const sqlObjectDataStartBackward = `
+SELECT max(key) as rkey FROM object_data
+  WHERE object_store_id=$object_store_id;`;
+
+const sqlObjectDataContinueForward = `
+SELECT min(key) as rkey FROM object_data
+  WHERE object_store_id=$object_store_id
+    AND key > $x;`;
+
+const sqlObjectDataContinueBackward = `
+SELECT max(key) as rkey FROM object_data
+  WHERE object_store_id=$object_store_id
+    AND key < $x;`;
+
+const sqlObjectDataContinueForwardInclusive = `
+SELECT min(key) as rkey FROM object_data
+  WHERE object_store_id=$object_store_id
+    AND key >= $x;`;
+
+const sqlObjectDataContinueBackwardInclusive = `
+SELECT max(key) as rkey FROM object_data
+  WHERE object_store_id=$object_store_id
+    AND key <= $x;`;
+
+const sqlObjectDataDeleteKey = `
+DELETE FROM object_data
+  WHERE object_store_id=$object_store_id AND
+        key=$key`;
+
+// Deletes all index entries referencing one object key.
+const sqlIndexDataDeleteKey = `
+DELETE FROM index_data
+  WHERE index_id=$index_id AND
+        object_key=$object_key;
+`;
+
+const sqlUniqueIndexDataDeleteKey = `
+DELETE FROM unique_index_data
+  WHERE index_id=$index_id AND
+        object_key=$object_key;
+`;
+
+// Cursor queries.  Ordering is over (index_key, object_key); for the
+// "unique" directions only the index_key is compared, and "prevunique"
+// reports the record with the *smallest* object_key of each index_key
+// (hence the "object_key ASC" in the backward-unique statements).
+
+// "next" or "nextunique" on a non-unique index
+const sqlIndexDataStartForward = `
+SELECT index_key, object_key FROM index_data
+  WHERE index_id=$index_id
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// start a "next" or "nextunique" on a unique index
+const sqlUniqueIndexDataStartForward = `
+SELECT index_key, object_key FROM unique_index_data
+  WHERE index_id=$index_id
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// start a "prev" or "prevunique" on a unique index
+const sqlUniqueIndexDataStartBackward = `
+SELECT index_key, object_key FROM unique_index_data
+  WHERE index_id=$index_id
+  ORDER BY index_key DESC, object_key DESC
+  LIMIT 1
+`;
+
+// start a "prevunique" query on a non-unique index
+const sqlIndexDataStartBackwardUnique = `
+SELECT index_key, object_key FROM index_data
+  WHERE index_id=$index_id
+  ORDER BY index_key DESC, object_key ASC
+  LIMIT 1
+`;
+
+// start a "prev" query on a non-unique index
+const sqlIndexDataStartBackward = `
+SELECT index_key, object_key FROM index_data
+  WHERE index_id=$index_id
+  ORDER BY index_key DESC, object_key DESC
+  LIMIT 1
+`;
+
+// continue a "next" query, strictly go to a further key
+const sqlIndexDataContinueForwardStrict = `
+SELECT index_key, object_key FROM index_data
+  WHERE
+    index_id=$index_id AND
+    ((index_key = $index_key AND object_key > $object_key) OR
+     (index_key > $index_key))
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// continue a "next" query, go to at least the specified key
+const sqlIndexDataContinueForwardInclusive = `
+SELECT index_key, object_key FROM index_data
+  WHERE
+    index_id=$index_id AND
+    ((index_key = $index_key AND object_key >= $object_key) OR
+     (index_key > $index_key))
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// continue a "prev" query
+const sqlIndexDataContinueBackwardStrict = `
+SELECT index_key, object_key FROM index_data
+  WHERE
+    index_id=$index_id AND
+    ((index_key = $index_key AND object_key < $object_key) OR
+     (index_key < $index_key))
+  ORDER BY index_key DESC, object_key DESC
+  LIMIT 1;
+`;
+
+// continue a "prev" query
+const sqlIndexDataContinueBackwardInclusive = `
+SELECT index_key, object_key FROM index_data
+  WHERE
+    index_id=$index_id AND
+    ((index_key = $index_key AND object_key <= $object_key) OR
+     (index_key < $index_key))
+  ORDER BY index_key DESC, object_key DESC
+  LIMIT 1;
+`;
+
+// continue a "prevunique" query
+const sqlIndexDataContinueBackwardStrictUnique = `
+SELECT index_key, object_key FROM index_data
+  WHERE index_id=$index_id AND index_key < $index_key
+  ORDER BY index_key DESC, object_key ASC
+  LIMIT 1;
+`;
+
+// continue a "prevunique" query
+const sqlIndexDataContinueBackwardInclusiveUnique = `
+SELECT index_key, object_key FROM index_data
+  WHERE index_id=$index_id AND index_key <= $index_key
+  ORDER BY index_key DESC, object_key ASC
+  LIMIT 1;
+`;
+
+// continue a "next" query, no target object key
+const sqlIndexDataContinueForwardStrictUnique = `
+SELECT index_key, object_key FROM index_data
+  WHERE index_id=$index_id AND index_key > $index_key
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// continue a "next" query, no target object key
+const sqlIndexDataContinueForwardInclusiveUnique = `
+SELECT index_key, object_key FROM index_data
+  WHERE index_id=$index_id AND index_key >= $index_key
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// continue a "next" query, strictly go to a further key
+const sqlUniqueIndexDataContinueForwardStrict = `
+SELECT index_key, object_key FROM unique_index_data
+  WHERE index_id=$index_id AND index_key > $index_key
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// continue a "next" query, go to at least the specified key
+const sqlUniqueIndexDataContinueForwardInclusive = `
+SELECT index_key, object_key FROM unique_index_data
+  WHERE index_id=$index_id AND index_key >= $index_key
+  ORDER BY index_key, object_key
+  LIMIT 1;
+`;
+
+// continue a "prev" query
+// Fix: a backward continue must select the *largest* key below the
+// target; the previous ascending ORDER BY returned the smallest key
+// of the whole index, making the cursor jump back to the start
+// (compare the Inclusive variant below, which already orders DESC).
+const sqlUniqueIndexDataContinueBackwardStrict = `
+SELECT index_key, object_key FROM unique_index_data
+  WHERE index_id=$index_id AND index_key < $index_key
+  ORDER BY index_key DESC, object_key DESC
+  LIMIT 1;
+`;
+
+// continue a "prev" query on a unique index, allowing the cursor to
+// stay on a record with exactly the target index key
+const sqlUniqueIndexDataContinueBackwardInclusive = `
+SELECT index_key, object_key FROM unique_index_data
+  WHERE index_id=$index_id AND index_key <= $index_key
+  ORDER BY index_key DESC, object_key DESC
+  LIMIT 1;
+`;
+
+export interface SqliteBackendOptions {
+  // Database file name; passed verbatim to the sqlite
+  // implementation's open().
+  filename: string;
+}
+
+/**
+ * Open (or create) a sqlite database file and return an idb-bridge
+ * backend on top of it.
+ *
+ * Enables foreign key enforcement and creates the schema tables if
+ * they do not exist yet (all schema statements use IF NOT EXISTS).
+ */
+export async function createSqliteBackend(
+  sqliteImpl: Sqlite3Interface,
+  options: SqliteBackendOptions,
+): Promise<SqliteBackend> {
+  const db = sqliteImpl.open(options.filename);
+  db.exec("PRAGMA foreign_keys = ON;");
+  db.exec(schemaSql);
+  return new SqliteBackend(sqliteImpl, db);
+}
diff --git a/packages/idb-bridge/src/backend-common.ts b/packages/idb-bridge/src/backend-common.ts
new file mode 100644
index 000000000..d52071939
--- /dev/null
+++ b/packages/idb-bridge/src/backend-common.ts
@@ -0,0 +1,29 @@
+import { openPromise } from "./util/openPromise.js";
+
+export class AsyncCondition {
+ _waitPromise: Promise<void>;
+ _resolveWaitPromise: () => void;
+ constructor() {
+ const op = openPromise<void>();
+ this._waitPromise = op.promise;
+ this._resolveWaitPromise = op.resolve;
+ }
+
+ wait(): Promise<void> {
+ return this._waitPromise;
+ }
+
+ trigger(): void {
+ this._resolveWaitPromise();
+ const op = openPromise<void>();
+ this._waitPromise = op.promise;
+ this._resolveWaitPromise = op.resolve;
+ }
+}
+
+export enum TransactionLevel {
+ None = 0,
+ Read = 1,
+ Write = 2,
+ VersionChange = 3,
+}
diff --git a/packages/idb-bridge/src/backend-interface.ts b/packages/idb-bridge/src/backend-interface.ts
index a21515544..3255261e2 100644
--- a/packages/idb-bridge/src/backend-interface.ts
+++ b/packages/idb-bridge/src/backend-interface.ts
@@ -21,66 +21,45 @@ import {
IDBValidKey,
} from "./idbtypes.js";
-/** @public */
-export interface ObjectStoreProperties {
- keyPath: string[] | null;
- autoIncrement: boolean;
- indexes: { [nameame: string]: IndexProperties };
-}
-
-/** @public */
-export interface IndexProperties {
- keyPath: string[];
- multiEntry: boolean;
- unique: boolean;
-}
-
-/** @public */
-export interface Schema {
- databaseName: string;
- databaseVersion: number;
- objectStores: { [name: string]: ObjectStoreProperties };
+export interface ConnectResult {
+ conn: DatabaseConnection;
+ version: number;
+ objectStores: string[];
}
-/** @public */
export interface DatabaseConnection {
connectionCookie: string;
}
-/** @public */
export interface DatabaseTransaction {
transactionCookie: string;
}
-/** @public */
export enum ResultLevel {
OnlyCount,
OnlyKeys,
Full,
}
-/** @public */
export enum StoreLevel {
NoOverwrite,
AllowOverwrite,
UpdateExisting,
}
-/** @public */
-export interface RecordGetRequest {
+
+export interface IndexGetQuery {
direction: IDBCursorDirection;
objectStoreName: string;
- indexName: string | undefined;
+ indexName: string;
/**
* The range of keys to return.
- * If indexName is defined, the range refers to the index keys.
- * Otherwise it refers to the object store keys.
+ * The range refers to the index keys.
*/
range: BridgeIDBKeyRange | undefined | null;
/**
* Last cursor position in terms of the index key.
- * Can only be specified if indexName is defined and
- * lastObjectStorePosition is defined.
+ * Can only be specified if lastObjectStorePosition is defined.
*
* Must either be undefined or within range.
*/
@@ -92,8 +71,6 @@ export interface RecordGetRequest {
/**
* If specified, the index key of the results must be
* greater or equal to advanceIndexKey.
- *
- * Only applicable if indexName is specified.
*/
advanceIndexKey?: IDBValidKey;
/**
@@ -109,7 +86,31 @@ export interface RecordGetRequest {
resultLevel: ResultLevel;
}
-/** @public */
+export interface ObjectStoreGetQuery {
+ direction: IDBCursorDirection;
+ objectStoreName: string;
+ /**
+ * The range of keys to return.
+ * Refers to the object store keys.
+ */
+ range: BridgeIDBKeyRange | undefined | null;
+ /**
+ * Last position in terms of the object store key.
+ */
+ lastObjectStorePosition?: IDBValidKey;
+ /**
+ * If specified, the primary key of the results must be greater
+ * or equal to advancePrimaryKey.
+ */
+ advancePrimaryKey?: IDBValidKey;
+ /**
+ * Maximum number of results to return.
+ * If 0, return all available results
+ */
+ limit: number;
+ resultLevel: ResultLevel;
+}
+
export interface RecordGetResponse {
values: any[] | undefined;
indexKeys: IDBValidKey[] | undefined;
@@ -117,7 +118,6 @@ export interface RecordGetResponse {
count: number;
}
-/** @public */
export interface RecordStoreRequest {
objectStoreName: string;
value: any;
@@ -125,7 +125,6 @@ export interface RecordStoreRequest {
storeLevel: StoreLevel;
}
-/** @public */
export interface RecordStoreResponse {
/**
* Key that the record was stored under in the object store.
@@ -133,38 +132,79 @@ export interface RecordStoreResponse {
key: IDBValidKey;
}
-/** @public */
+export interface ObjectStoreMeta {
+ indexSet: string[];
+ keyPath: string | string[] | null;
+ autoIncrement: boolean;
+}
+
+export interface IndexMeta {
+ keyPath: string | string[];
+ multiEntry: boolean;
+ unique: boolean;
+}
+
+// FIXME: Instead of referring to an object store by name,
+// maybe refer to it via some internal, numeric ID?
+// This would simplify renaming.
export interface Backend {
getDatabases(): Promise<BridgeIDBDatabaseInfo[]>;
- connectDatabase(name: string): Promise<DatabaseConnection>;
+ connectDatabase(name: string): Promise<ConnectResult>;
beginTransaction(
- conn: DatabaseConnection,
+ dbConn: DatabaseConnection,
objectStores: string[],
mode: IDBTransactionMode,
): Promise<DatabaseTransaction>;
enterVersionChange(
- conn: DatabaseConnection,
+ dbConn: DatabaseConnection,
newVersion: number,
): Promise<DatabaseTransaction>;
deleteDatabase(name: string): Promise<void>;
- close(db: DatabaseConnection): Promise<void>;
+ close(dbConn: DatabaseConnection): Promise<void>;
- getSchema(db: DatabaseConnection): Schema;
+ // FIXME: Use this for connection
+ // prepareConnect() - acquires a lock, maybe enters a version change transaction?
+ // finishConnect() - after possible versionchange is done, allow others to connect
- getCurrentTransactionSchema(btx: DatabaseTransaction): Schema;
+ /**
+ * Get metadata for an object store.
+ *
+ * When dbConn is running a version change transaction,
+ * the current schema (and not the initial schema) is returned.
+ *
+ * Caller may mutate the result, a new object
+ * is returned on each call.
+ */
+ getObjectStoreMeta(
+ dbConn: DatabaseConnection,
+ objectStoreName: string,
+ ): ObjectStoreMeta | undefined;
- getInitialTransactionSchema(btx: DatabaseTransaction): Schema;
+ /**
+ * Get metadata for an index.
+ *
+ * When dbConn is running a version change transaction,
+ * the current schema (and not the initial schema) is returned.
+ *
+ * Caller may mutate the result, a new object
+ * is returned on each call.
+ */
+ getIndexMeta(
+ dbConn: DatabaseConnection,
+ objectStoreName: string,
+ indexName: string,
+ ): IndexMeta | undefined;
renameIndex(
btx: DatabaseTransaction,
objectStoreName: string,
- oldName: string,
- newName: string,
+ oldIndexName: string,
+ newIndexName: string,
): void;
deleteIndex(
@@ -173,8 +213,9 @@ export interface Backend {
indexName: string,
): void;
- rollback(btx: DatabaseTransaction): Promise<void>;
+ rollback(btx: DatabaseTransaction): void;
+ // FIXME: Should probably not be async
commit(btx: DatabaseTransaction): Promise<void>;
deleteObjectStore(btx: DatabaseTransaction, name: string): void;
@@ -207,9 +248,14 @@ export interface Backend {
range: BridgeIDBKeyRange,
): Promise<void>;
- getRecords(
+ getObjectStoreRecords(
+ btx: DatabaseTransaction,
+ req: ObjectStoreGetQuery,
+ ): Promise<RecordGetResponse>;
+
+ getIndexRecords(
btx: DatabaseTransaction,
- req: RecordGetRequest,
+ req: IndexGetQuery,
): Promise<RecordGetResponse>;
storeRecord(
diff --git a/packages/idb-bridge/src/backends.test.ts b/packages/idb-bridge/src/backends.test.ts
new file mode 100644
index 000000000..684358eac
--- /dev/null
+++ b/packages/idb-bridge/src/backends.test.ts
@@ -0,0 +1,740 @@
+/*
+ Copyright 2019 Florian Dold
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ or implied. See the License for the specific language governing
+ permissions and limitations under the License.
+ */
+
+/**
+ * Tests that are backend-generic.
+ * See testingdb.ts for the backend selection in test runs.
+ */
+
+/**
+ * Imports.
+ */
+import test from "ava";
+import {
+ BridgeIDBCursorWithValue,
+ BridgeIDBDatabase,
+ BridgeIDBFactory,
+ BridgeIDBKeyRange,
+ BridgeIDBTransaction,
+} from "./bridge-idb.js";
+import {
+ IDBCursorDirection,
+ IDBCursorWithValue,
+ IDBDatabase,
+ IDBKeyRange,
+ IDBRequest,
+ IDBValidKey,
+} from "./idbtypes.js";
+import { initTestIndexedDB, useTestIndexedDb } from "./testingdb.js";
+import { MemoryBackend } from "./MemoryBackend.js";
+import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js";
+
+test.before("test DB initialization", initTestIndexedDB);
+
+test("Spec: Example 1 Part 1", async (t) => {
+ const idb = useTestIndexedDb();
+
+ const dbname = "library-" + new Date().getTime() + Math.random();
+
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result as BridgeIDBDatabase;
+ const store = db.createObjectStore("books", { keyPath: "isbn" });
+ const titleIndex = store.createIndex("by_title", "title", { unique: true });
+ const authorIndex = store.createIndex("by_author", "author");
+
+ // Populate with initial data.
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
+ store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
+ store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
+ };
+
+ await promiseFromRequest(request);
+ t.pass();
+});
+
+test("Spec: Example 1 Part 2", async (t) => {
+ const idb = useTestIndexedDb();
+
+ const dbname = "library-" + new Date().getTime() + Math.random();
+
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("books", { keyPath: "isbn" });
+ const titleIndex = store.createIndex("by_title", "title", { unique: true });
+ const authorIndex = store.createIndex("by_author", "author");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+
+ t.is(db.name, dbname);
+
+ const tx = db.transaction("books", "readwrite");
+ tx.oncomplete = () => {
+ console.log("oncomplete called");
+ };
+
+ const store = tx.objectStore("books");
+
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
+ store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
+ store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
+
+ await promiseFromTransaction(tx);
+
+ t.pass();
+});
+
+test("duplicate index insertion", async (t) => {
+ const idb = useTestIndexedDb();
+
+ const dbname = "library-" + new Date().getTime() + Math.random();
+
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("books", { keyPath: "isbn" });
+ const titleIndex = store.createIndex("by_title", "title", { unique: true });
+ const authorIndex = store.createIndex("by_author", "author");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+
+ t.is(db.name, dbname);
+
+ const tx = db.transaction("books", "readwrite");
+ tx.oncomplete = () => {
+ console.log("oncomplete called");
+ };
+
+ const store = tx.objectStore("books");
+
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
+
+ // Change the index key, keep primary key (isbn) the same.
+ store.put({ title: "Water Buffaloes", author: "Bla", isbn: 234567 });
+ store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
+
+ store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
+
+ await promiseFromTransaction(tx);
+
+ const tx3 = db.transaction(["books"], "readonly");
+ const store3 = tx3.objectStore("books");
+ const index3 = store3.index("by_author");
+ const request3 = index3.openCursor();
+
+ const authorList: string[] = [];
+
+ await promiseFromRequest(request3);
+ while (request3.result != null) {
+ const cursor: IDBCursorWithValue = request3.result;
+ authorList.push(cursor.value.author);
+ cursor.continue();
+ await promiseFromRequest(request3);
+ }
+
+ t.deepEqual(authorList, ["Barney", "Fred", "Fred"]);
+
+ t.pass();
+});
+
+test("simple index iteration", async (t) => {
+ const idb = useTestIndexedDb();
+ const dbname = "library-" + new Date().getTime() + Math.random();
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("books", { keyPath: "isbn" });
+ const titleIndex = store.createIndex("by_title", "title", { unique: true });
+ const authorIndex = store.createIndex("by_author", "author");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+ const tx = db.transaction("books", "readwrite");
+ const store = tx.objectStore("books");
+
+ store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
+ store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
+
+ await promiseFromTransaction(tx);
+
+ const tx3 = db.transaction(["books"], "readonly");
+ const store3 = tx3.objectStore("books");
+ const index3 = store3.index("by_author");
+ const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
+
+ await promiseFromRequest(request3);
+
+ let cursor: BridgeIDBCursorWithValue | null;
+ cursor = request3.result as BridgeIDBCursorWithValue;
+ t.is(cursor.value.author, "Fred");
+ t.is(cursor.value.isbn, 123456);
+
+ cursor.continue();
+
+ await promiseFromRequest(request3);
+
+ t.is(cursor.value.author, "Fred");
+ t.is(cursor.value.isbn, 234567);
+
+ cursor.continue();
+
+ await promiseFromRequest(request3);
+
+ t.is(cursor.value, undefined);
+});
+
+test("Spec: Example 1 Part 3", async (t) => {
+ const idb = useTestIndexedDb();
+ const dbname = "library-" + new Date().getTime() + Math.random();
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("books", { keyPath: "isbn" });
+ const titleIndex = store.createIndex("by_title", "title", { unique: true });
+ const authorIndex = store.createIndex("by_author", "author");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+
+ t.is(db.name, dbname);
+
+ const tx = db.transaction("books", "readwrite");
+
+ const store = tx.objectStore("books");
+
+ store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
+ store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
+
+ await promiseFromTransaction(tx);
+
+ const tx2 = db.transaction("books", "readonly");
+ const store2 = tx2.objectStore("books");
+ var index2 = store2.index("by_title");
+ const request2 = index2.get("Bedrock Nights");
+ const result2: any = await promiseFromRequest(request2);
+
+ t.is(result2.author, "Barney");
+
+ const tx3 = db.transaction(["books"], "readonly");
+ const store3 = tx3.objectStore("books");
+ const index3 = store3.index("by_author");
+ const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
+
+ await promiseFromRequest(request3);
+
+ let cursor: BridgeIDBCursorWithValue | null;
+ cursor = request3.result as BridgeIDBCursorWithValue;
+ t.is(cursor.value.author, "Fred");
+ t.is(cursor.value.isbn, 123456);
+
+ cursor.continue();
+
+ await promiseFromRequest(request3);
+
+ cursor = request3.result as BridgeIDBCursorWithValue;
+ t.is(cursor.value.author, "Fred");
+ t.is(cursor.value.isbn, 234567);
+
+ await promiseFromTransaction(tx3);
+
+ const tx4 = db.transaction("books", "readonly");
+ const store4 = tx4.objectStore("books");
+ const request4 = store4.openCursor();
+
+ await promiseFromRequest(request4);
+
+ cursor = request4.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.isbn, 123456);
+
+ cursor.continue();
+
+ await promiseFromRequest(request4);
+
+ cursor = request4.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.isbn, 234567);
+
+ cursor.continue();
+
+ await promiseFromRequest(request4);
+
+ cursor = request4.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.isbn, 345678);
+
+ cursor.continue();
+ await promiseFromRequest(request4);
+
+ cursor = request4.result;
+
+ t.is(cursor, null);
+
+ const tx5 = db.transaction("books", "readonly");
+ const store5 = tx5.objectStore("books");
+ const index5 = store5.index("by_author");
+
+ const request5 = index5.openCursor(null, "next");
+
+ await promiseFromRequest(request5);
+ cursor = request5.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.author, "Barney");
+ cursor.continue();
+
+ await promiseFromRequest(request5);
+ cursor = request5.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.author, "Fred");
+ cursor.continue();
+
+ await promiseFromRequest(request5);
+ cursor = request5.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.author, "Fred");
+ cursor.continue();
+
+ await promiseFromRequest(request5);
+ cursor = request5.result;
+ t.is(cursor, null);
+
+ const request6 = index5.openCursor(null, "nextunique");
+
+ await promiseFromRequest(request6);
+ cursor = request6.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.author, "Barney");
+ cursor.continue();
+
+ await promiseFromRequest(request6);
+ cursor = request6.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.author, "Fred");
+ t.is(cursor.value.isbn, 123456);
+ cursor.continue();
+
+ await promiseFromRequest(request6);
+ cursor = request6.result;
+ t.is(cursor, null);
+
+ console.log("---------------------------");
+
+ const request7 = index5.openCursor(null, "prevunique");
+ await promiseFromRequest(request7);
+ cursor = request7.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.author, "Fred");
+ t.is(cursor.value.isbn, 123456);
+ cursor.continue();
+
+ await promiseFromRequest(request7);
+ cursor = request7.result;
+ if (!cursor) {
+ throw new Error();
+ }
+ t.is(cursor.value.author, "Barney");
+ cursor.continue();
+
+ await promiseFromRequest(request7);
+ cursor = request7.result;
+ t.is(cursor, null);
+
+ db.close();
+
+ t.pass();
+});
+
+test("simple deletion", async (t) => {
+ const idb = useTestIndexedDb();
+ const dbname = "library-" + new Date().getTime() + Math.random();
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("books", { keyPath: "isbn" });
+ const titleIndex = store.createIndex("by_title", "title", { unique: true });
+ const authorIndex = store.createIndex("by_author", "author");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+ const tx = db.transaction("books", "readwrite");
+ tx.oncomplete = () => {
+ console.log("oncomplete called");
+ };
+
+ const store = tx.objectStore("books");
+
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
+ store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
+ store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
+
+ await promiseFromTransaction(tx);
+
+ const tx2 = db.transaction("books", "readwrite");
+
+ const store2 = tx2.objectStore("books");
+
+ const req1 = store2.get(234567);
+ await promiseFromRequest(req1);
+ t.is(req1.readyState, "done");
+ t.is(req1.result.author, "Fred");
+
+ store2.delete(123456);
+
+ const req2 = store2.get(123456);
+ await promiseFromRequest(req2);
+ t.is(req2.readyState, "done");
+ t.is(req2.result, undefined);
+
+ const req3 = store2.get(234567);
+ await promiseFromRequest(req3);
+ t.is(req3.readyState, "done");
+ t.is(req3.result.author, "Fred");
+
+ await promiseFromTransaction(tx2);
+
+ t.pass();
+});
+
+test("export", async (t) => {
+ const backend = new MemoryBackend();
+ const idb = new BridgeIDBFactory(backend);
+ const dbname = "library-" + new Date().getTime() + Math.random();
+ const request = idb.open(dbname, 42);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("books", { keyPath: "isbn" });
+ const titleIndex = store.createIndex("by_title", "title", { unique: true });
+ const authorIndex = store.createIndex("by_author", "author");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+
+ const tx = db.transaction("books", "readwrite");
+ tx.oncomplete = () => {
+ console.log("oncomplete called");
+ };
+
+ const store = tx.objectStore("books");
+
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
+ store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
+ store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
+
+ await promiseFromTransaction(tx);
+
+ const exportedData = backend.exportDump();
+ const backend2 = new MemoryBackend();
+ backend2.importDump(exportedData);
+ const exportedData2 = backend2.exportDump();
+
+ t.assert(
+ exportedData.databases[dbname].objectStores["books"].records.length ===
+ 3,
+ );
+ t.deepEqual(exportedData, exportedData2);
+
+ t.is(exportedData.databases[dbname].schema.databaseVersion, 42);
+ t.is(exportedData2.databases[dbname].schema.databaseVersion, 42);
+ t.pass();
+});
+
+test("update with non-existent index values", async (t) => {
+ const idb = useTestIndexedDb();
+ const dbname = "mydb-" + new Date().getTime() + Math.random();
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("bla", { keyPath: "x" });
+ store.createIndex("by_y", "y");
+ store.createIndex("by_z", "z");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+
+ t.is(db.name, dbname);
+
+ {
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+ store.put({ x: 0, y: "a", z: 42 });
+ const index = store.index("by_z");
+ const indRes = await promiseFromRequest(index.get(42));
+ t.is(indRes.x, 0);
+ const res = await promiseFromRequest(store.get(0));
+ t.is(res.z, 42);
+ await promiseFromTransaction(tx);
+ }
+
+ {
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+ store.put({ x: 0, y: "a" });
+ const res = await promiseFromRequest(store.get(0));
+ t.is(res.z, undefined);
+ await promiseFromTransaction(tx);
+ }
+
+ {
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+ const index = store.index("by_z");
+ {
+ const indRes = await promiseFromRequest(index.get(42));
+ t.is(indRes, undefined);
+ }
+ const res = await promiseFromRequest(store.get(0));
+ t.is(res.z, undefined);
+ await promiseFromTransaction(tx);
+ }
+
+ t.pass();
+});
+
+test("delete from unique index", async (t) => {
+ const idb = useTestIndexedDb();
+ const dbname = "mydb-" + new Date().getTime() + Math.random();
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result as IDBDatabase;
+ const store = db.createObjectStore("bla", { keyPath: "x" });
+ store.createIndex("by_yz", ["y", "z"], {
+ unique: true,
+ });
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+
+ t.is(db.name, dbname);
+
+ {
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+ store.put({ x: 0, y: "a", z: 42 });
+ const index = store.index("by_yz");
+ const indRes = await promiseFromRequest(index.get(["a", 42]));
+ t.is(indRes.x, 0);
+ const res = await promiseFromRequest(store.get(0));
+ t.is(res.z, 42);
+ await promiseFromTransaction(tx);
+ }
+
+ {
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+ store.put({ x: 0, y: "a", z: 42, extra: 123 });
+ await promiseFromTransaction(tx);
+ }
+
+ t.pass();
+});
+
+test("range queries", async (t) => {
+ const idb = useTestIndexedDb();
+ const dbname = "mydb-" + new Date().getTime() + Math.random();
+ const request = idb.open(dbname);
+ request.onupgradeneeded = () => {
+ const db = request.result;
+ const store = db.createObjectStore("bla", { keyPath: "x" });
+ store.createIndex("by_y", "y");
+ store.createIndex("by_z", "z");
+ };
+
+ const db: BridgeIDBDatabase = await promiseFromRequest(request);
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+
+ store.put({ x: 0, y: "a" });
+ store.put({ x: 2, y: "a" });
+ store.put({ x: 4, y: "b" });
+ store.put({ x: 8, y: "b" });
+ store.put({ x: 10, y: "c" });
+ store.put({ x: 12, y: "c" });
+
+ await promiseFromTransaction(tx);
+
+ async function doCursorStoreQuery(
+ range: IDBKeyRange | IDBValidKey | undefined,
+ direction: IDBCursorDirection | undefined,
+ expected: any[],
+ ): Promise<void> {
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+ const vals: any[] = [];
+
+ const req = store.openCursor(range, direction);
+ while (1) {
+ await promiseFromRequest(req);
+ const cursor: IDBCursorWithValue = req.result;
+ if (!cursor) {
+ break;
+ }
+ cursor.continue();
+ vals.push(cursor.value);
+ }
+
+ await promiseFromTransaction(tx);
+
+ t.deepEqual(vals, expected);
+ }
+
+ async function doCursorIndexQuery(
+ range: IDBKeyRange | IDBValidKey | undefined,
+ direction: IDBCursorDirection | undefined,
+ expected: any[],
+ ): Promise<void> {
+ const tx = db.transaction("bla", "readwrite");
+ const store = tx.objectStore("bla");
+ const index = store.index("by_y");
+ const vals: any[] = [];
+
+ const req = index.openCursor(range, direction);
+ while (1) {
+ await promiseFromRequest(req);
+ const cursor: IDBCursorWithValue = req.result;
+ if (!cursor) {
+ break;
+ }
+ cursor.continue();
+ vals.push(cursor.value);
+ }
+
+ await promiseFromTransaction(tx);
+
+ t.deepEqual(vals, expected);
+ }
+
+ await doCursorStoreQuery(undefined, undefined, [
+ {
+ x: 0,
+ y: "a",
+ },
+ {
+ x: 2,
+ y: "a",
+ },
+ {
+ x: 4,
+ y: "b",
+ },
+ {
+ x: 8,
+ y: "b",
+ },
+ {
+ x: 10,
+ y: "c",
+ },
+ {
+ x: 12,
+ y: "c",
+ },
+ ]);
+
+ await doCursorStoreQuery(
+ BridgeIDBKeyRange.bound(0, 12, true, true),
+ undefined,
+ [
+ {
+ x: 2,
+ y: "a",
+ },
+ {
+ x: 4,
+ y: "b",
+ },
+ {
+ x: 8,
+ y: "b",
+ },
+ {
+ x: 10,
+ y: "c",
+ },
+ ],
+ );
+
+ await doCursorIndexQuery(
+ BridgeIDBKeyRange.bound("a", "c", true, true),
+ undefined,
+ [
+ {
+ x: 4,
+ y: "b",
+ },
+ {
+ x: 8,
+ y: "b",
+ },
+ ],
+ );
+
+ await doCursorIndexQuery(undefined, "nextunique", [
+ {
+ x: 0,
+ y: "a",
+ },
+ {
+ x: 4,
+ y: "b",
+ },
+ {
+ x: 10,
+ y: "c",
+ },
+ ]);
+
+ await doCursorIndexQuery(undefined, "prevunique", [
+ {
+ x: 10,
+ y: "c",
+ },
+ {
+ x: 4,
+ y: "b",
+ },
+ {
+ x: 0,
+ y: "a",
+ },
+ ]);
+
+ db.close();
+
+ t.pass();
+});
diff --git a/packages/idb-bridge/src/bridge-idb.ts b/packages/idb-bridge/src/bridge-idb.ts
index 128a6900d..8cecba534 100644
--- a/packages/idb-bridge/src/bridge-idb.ts
+++ b/packages/idb-bridge/src/bridge-idb.ts
@@ -17,12 +17,16 @@
import {
Backend,
+ ConnectResult,
DatabaseConnection,
DatabaseTransaction,
- RecordGetRequest,
+ IndexGetQuery,
+ IndexMeta,
+ ObjectStoreGetQuery,
+ ObjectStoreMeta,
+ RecordGetResponse,
RecordStoreRequest,
ResultLevel,
- Schema,
StoreLevel,
} from "./backend-interface.js";
import {
@@ -57,10 +61,7 @@ import {
TransactionInactiveError,
VersionError,
} from "./util/errors.js";
-import {
- FakeDOMStringList,
- fakeDOMStringList,
-} from "./util/fakeDOMStringList.js";
+import { fakeDOMStringList } from "./util/fakeDOMStringList.js";
import FakeEvent from "./util/FakeEvent.js";
import FakeEventTarget from "./util/FakeEventTarget.js";
import { makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
@@ -71,17 +72,14 @@ import { checkStructuredCloneOrThrow } from "./util/structuredClone.js";
import { validateKeyPath } from "./util/validateKeyPath.js";
import { valueToKey } from "./util/valueToKey.js";
-/** @public */
export type CursorSource = BridgeIDBIndex | BridgeIDBObjectStore;
-/** @public */
export interface RequestObj {
operation: () => Promise<any>;
request?: BridgeIDBRequest | undefined;
source?: any;
}
-/** @public */
export interface BridgeIDBDatabaseInfo {
name: string;
version: number;
@@ -101,8 +99,6 @@ function simplifyRange(
/**
* http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#cursor
- *
- * @public
*/
export class BridgeIDBCursor implements IDBCursor {
_request: BridgeIDBRequest | undefined;
@@ -207,29 +203,56 @@ export class BridgeIDBCursor implements IDBCursor {
);
BridgeIDBFactory.enableTracing &&
console.log("cursor type ", this.toString());
- const isIndex = this._indexName !== undefined;
- const recordGetRequest: RecordGetRequest = {
- direction: this.direction,
- indexName: this._indexName,
- lastIndexPosition: this._indexPosition,
- lastObjectStorePosition: this._objectStorePosition,
- limit: 1,
- range: simplifyRange(this._range),
- objectStoreName: this._objectStoreName,
- advanceIndexKey: isIndex ? key : undefined,
- advancePrimaryKey: isIndex ? primaryKey : key,
- resultLevel: this._keyOnly ? ResultLevel.OnlyKeys : ResultLevel.Full,
- };
+ const indexName = this._indexName;
const { btx } = this.source._confirmStartedBackendTransaction();
- let response = await this._backend.getRecords(btx, recordGetRequest);
+ let response: RecordGetResponse;
+
+ if (indexName != null) {
+ const indexRecordGetRequest: IndexGetQuery = {
+ direction: this.direction,
+ indexName: indexName,
+ lastIndexPosition: this._indexPosition,
+ lastObjectStorePosition: this._objectStorePosition,
+ limit: 1,
+ range: simplifyRange(this._range),
+ objectStoreName: this._objectStoreName,
+ advanceIndexKey: key,
+ advancePrimaryKey: primaryKey,
+ resultLevel: this._keyOnly ? ResultLevel.OnlyKeys : ResultLevel.Full,
+ };
+ response = await this._backend.getIndexRecords(
+ btx,
+ indexRecordGetRequest,
+ );
+ } else {
+ if (primaryKey != null) {
+ // Only allowed for index cursors
+ throw new InvalidAccessError();
+ }
+ const objStoreGetRequest: ObjectStoreGetQuery = {
+ direction: this.direction,
+ lastObjectStorePosition: this._objectStorePosition,
+ limit: 1,
+ range: simplifyRange(this._range),
+ objectStoreName: this._objectStoreName,
+ advancePrimaryKey: key,
+ resultLevel: this._keyOnly ? ResultLevel.OnlyKeys : ResultLevel.Full,
+ };
+ response = await this._backend.getObjectStoreRecords(
+ btx,
+ objStoreGetRequest,
+ );
+ }
if (response.count === 0) {
if (BridgeIDBFactory.enableTracing) {
console.log("cursor is returning empty result");
}
this._gotValue = false;
+ this._key = undefined;
+ this._value = undefined;
return null;
}
@@ -237,11 +260,6 @@ export class BridgeIDBCursor implements IDBCursor {
throw Error("invariant failed");
}
- if (BridgeIDBFactory.enableTracing) {
- console.log("request is:", JSON.stringify(recordGetRequest));
- console.log("get response is:", JSON.stringify(response));
- }
-
if (this._indexName !== undefined) {
this._key = response.indexKeys![0];
} else {
@@ -550,7 +568,6 @@ const confirmActiveVersionchangeTransaction = (database: BridgeIDBDatabase) => {
};
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#database-interface
-/** @public */
export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
_closePending = false;
_closed = false;
@@ -561,7 +578,16 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
_backendConnection: DatabaseConnection;
_backend: Backend;
- _schema: Schema;
+ _name: string;
+
+ _initialVersion: number;
+
+ _version: number;
+
+ // "object store set" from the spec
+ _objectStoreSet: string[];
+
+ // _schema: Schema;
/**
* Name that can be set to identify the object store in logs.
@@ -569,17 +595,15 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
_debugName: string | undefined = undefined;
get name(): string {
- return this._schema.databaseName;
+ return this._name;
}
get version(): number {
- return this._schema.databaseVersion;
+ return this._version;
}
get objectStoreNames(): DOMStringList {
- return fakeDOMStringList(
- Object.keys(this._schema.objectStores),
- ).sort() as DOMStringList;
+ return fakeDOMStringList([...this._objectStoreSet]).sort() as DOMStringList;
}
/**
@@ -606,13 +630,13 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
}
}
- constructor(backend: Backend, backendConnection: DatabaseConnection) {
+ constructor(name: string, backend: Backend, connResult: ConnectResult) {
super();
-
- this._schema = backend.getSchema(backendConnection);
-
+ this._name = name;
+ this._version = this._initialVersion = connResult.version;
this._backend = backend;
- this._backendConnection = backendConnection;
+ this._backendConnection = connResult.conn;
+ this._objectStoreSet = connResult.objectStores;
}
// http://w3c.github.io/IndexedDB/#dom-idbdatabase-createobjectstore
@@ -645,7 +669,8 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
validateKeyPath(keyPath);
}
- if (Object.keys(this._schema.objectStores).includes(name)) {
+ if (this._objectStoreSet.includes(name)) {
+ // Already exists
throw new ConstraintError();
}
@@ -660,7 +685,9 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
autoIncrement,
);
- this._schema = this._backend.getCurrentTransactionSchema(backendTx);
+ transaction._scope.add(name);
+ this._objectStoreSet.push(name);
+ this._objectStoreSet.sort();
const newObjectStore = transaction.objectStore(name);
newObjectStore._justCreated = true;
@@ -682,6 +709,10 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
os._deleted = true;
transaction._objectStoresCache.delete(name);
}
+ transaction._cachedObjectStoreNames = undefined;
+ transaction._scope.delete(name);
+ const nameIdx = this._objectStoreSet.indexOf(name);
+ this._objectStoreSet.splice(nameIdx, 1);
}
public _internalTransaction(
@@ -766,10 +797,8 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase {
}
}
-/** @public */
export type DatabaseList = Array<{ name: string; version: number }>;
-/** @public */
export class BridgeIDBFactory {
public cmp = compareKeys;
private backend: Backend;
@@ -810,8 +839,10 @@ export class BridgeIDBFactory {
});
request.dispatchEvent(event2);
} catch (err: any) {
- request.error = new Error();
- request.error.name = err.name;
+ const myErr = new Error();
+ myErr.name = err.name;
+ myErr.message = err.message;
+ request.error = myErr;
request.readyState = "done";
const event = new FakeEvent("error", {
@@ -841,27 +872,26 @@ export class BridgeIDBFactory {
const request = new BridgeIDBOpenDBRequest();
queueTask(async () => {
- let dbconn: DatabaseConnection;
+ let dbConnRes: ConnectResult;
try {
if (BridgeIDBFactory.enableTracing) {
console.log("TRACE: connecting to database");
}
- dbconn = await this.backend.connectDatabase(name);
+ dbConnRes = await this.backend.connectDatabase(name);
if (BridgeIDBFactory.enableTracing) {
console.log("TRACE: connected!");
}
} catch (err: any) {
if (BridgeIDBFactory.enableTracing) {
console.log(
- "TRACE: caught exception while trying to connect with backend",
+ "TRACE: caught exception while trying to connect with backend:",
+ err,
);
}
request._finishWithError(err);
return;
}
-
- const schema = this.backend.getSchema(dbconn);
- const existingVersion = schema.databaseVersion;
+ const existingVersion = dbConnRes.version;
if (version === undefined) {
version = existingVersion !== 0 ? existingVersion : 1;
@@ -879,7 +909,7 @@ export class BridgeIDBFactory {
return;
}
- const db = new BridgeIDBDatabase(this.backend, dbconn);
+ const db = new BridgeIDBDatabase(name, this.backend, dbConnRes);
if (existingVersion == requestedVersion) {
request.result = db;
@@ -929,16 +959,14 @@ export class BridgeIDBFactory {
}
const backendTransaction = await this.backend.enterVersionChange(
- dbconn,
+ dbConnRes.conn,
requestedVersion,
);
// We need to expose the new version number to the upgrade transaction.
- db._schema =
- this.backend.getCurrentTransactionSchema(backendTransaction);
-
+ db._version = version;
const transaction = db._internalTransaction(
- [],
+ dbConnRes.objectStores,
"versionchange",
backendTransaction,
request,
@@ -1030,37 +1058,48 @@ export class BridgeIDBFactory {
}
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#idl-def-IDBIndex
-/** @public */
export class BridgeIDBIndex implements IDBIndex {
_objectStore: BridgeIDBObjectStore;
+ _indexMeta: IndexMeta;
+ _originalName: string | undefined = undefined;
+ _deleted: boolean = false;
+ _name: string;
+
+ /**
+ * Was this index newly created in the current transaction?
+ */
+ _justCreated: boolean = false;
get objectStore(): IDBObjectStore {
return this._objectStore;
}
- get _schema(): Schema {
- return this._objectStore._transaction._db._schema;
- }
-
get keyPath(): IDBKeyPath | IDBKeyPath[] {
- return this._schema.objectStores[this._objectStore.name].indexes[this._name]
- .keyPath;
+ return this._indexMeta.keyPath;
}
get multiEntry(): boolean {
- return this._schema.objectStores[this._objectStore.name].indexes[this._name]
- .multiEntry;
+ return this._indexMeta.multiEntry;
}
get unique(): boolean {
- return this._schema.objectStores[this._objectStore.name].indexes[this._name]
- .unique;
+    return this._indexMeta.unique;
}
get _backend(): Backend {
return this._objectStore._backend;
}
+ constructor(
+ objectStore: BridgeIDBObjectStore,
+ name: string,
+ indexMeta: IndexMeta,
+ ) {
+ this._name = name;
+ this._objectStore = objectStore;
+ this._indexMeta = indexMeta;
+ }
+
_confirmStartedBackendTransaction(): { btx: DatabaseTransaction } {
return this._objectStore._confirmStartedBackendTransaction();
}
@@ -1069,20 +1108,6 @@ export class BridgeIDBIndex implements IDBIndex {
this._objectStore._confirmActiveTransaction();
}
- private _name: string;
-
- public _deleted: boolean = false;
-
- /**
- * Was this index newly created in the current transaction?
- */
- _justCreated: boolean = false;
-
- constructor(objectStore: BridgeIDBObjectStore, name: string) {
- this._name = name;
- this._objectStore = objectStore;
- }
-
get name() {
return this._name;
}
@@ -1107,18 +1132,39 @@ export class BridgeIDBIndex implements IDBIndex {
if (newName === oldName) {
return;
}
-
+    if (this._originalName == null) {
+ this._originalName = oldName;
+ }
this._backend.renameIndex(btx, this._objectStore.name, oldName, newName);
+ this._applyNameChange(oldName, newName);
+    if (this._objectStore._objectStoreMeta.indexSet.indexOf(oldName) >= 0) {
+ throw new Error("internal invariant violated");
+ }
+ }
- this._objectStore._transaction._db._schema =
- this._backend.getCurrentTransactionSchema(btx);
-
- this._objectStore._indexesCache.delete(oldName);
- this._objectStore._indexesCache.set(newName, this);
+ _applyNameChange(oldName: string, newName: string) {
+ this._objectStore._indexHandlesCache.delete(oldName);
+ this._objectStore._indexHandlesCache.set(newName, this);
+ const indexSet = this._objectStore._objectStoreMeta.indexSet;
+ const indexIdx = indexSet.indexOf(oldName);
+ indexSet[indexIdx] = newName;
+ indexSet.sort();
this._name = newName;
+ }
- if (this._objectStore._indexNames.indexOf(name) >= 0) {
- throw new Error("internal invariant violated");
+ _applyDelete() {
+ this._objectStore._indexHandlesCache.delete(this._name);
+ const indexSet = this._objectStore._objectStoreMeta.indexSet;
+ const indexIdx = indexSet.indexOf(this._name);
+ indexSet.splice(indexIdx, 1);
+ }
+
+ _abort() {
+ if (this._originalName != null) {
+ this._applyNameChange(this._name, this._originalName);
+ }
+ if (this._justCreated) {
+ this._deleted = true;
}
}
@@ -1199,34 +1245,23 @@ export class BridgeIDBIndex implements IDBIndex {
}
private _confirmIndexExists() {
- const storeSchema = this._schema.objectStores[this._objectStore._name];
- if (!storeSchema) {
- throw new InvalidStateError(
- `no schema for object store '${this._objectStore._name}'`,
- );
- }
- if (!storeSchema.indexes[this._name]) {
- throw new InvalidStateError(
- `no schema for index '${this._name}' of object store '${this._objectStore._name}'`,
- );
- }
- }
-
- get(key: BridgeIDBKeyRange | IDBValidKey) {
if (this._deleted) {
throw new InvalidStateError();
}
if (this._objectStore._deleted) {
throw new InvalidStateError();
}
- this._confirmActiveTransaction();
+ }
+
+ get(key: BridgeIDBKeyRange | IDBValidKey) {
this._confirmIndexExists();
+ this._confirmActiveTransaction();
if (!(key instanceof BridgeIDBKeyRange)) {
key = BridgeIDBKeyRange._valueToKeyRange(key);
}
- const getReq: RecordGetRequest = {
+ const getReq: IndexGetQuery = {
direction: "next",
indexName: this._name,
limit: 1,
@@ -1237,7 +1272,7 @@ export class BridgeIDBIndex implements IDBIndex {
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, getReq);
+ const result = await this._backend.getIndexRecords(btx, getReq);
if (result.count == 0) {
return undefined;
}
@@ -1273,7 +1308,7 @@ export class BridgeIDBIndex implements IDBIndex {
count = -1;
}
- const getReq: RecordGetRequest = {
+ const getReq: IndexGetQuery = {
direction: "next",
indexName: this._name,
limit: count,
@@ -1284,7 +1319,7 @@ export class BridgeIDBIndex implements IDBIndex {
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, getReq);
+ const result = await this._backend.getIndexRecords(btx, getReq);
const values = result.values;
if (!values) {
throw Error("invariant violated");
@@ -1307,7 +1342,7 @@ export class BridgeIDBIndex implements IDBIndex {
key = BridgeIDBKeyRange._valueToKeyRange(key);
}
- const getReq: RecordGetRequest = {
+ const getReq: IndexGetQuery = {
direction: "next",
indexName: this._name,
limit: 1,
@@ -1318,7 +1353,7 @@ export class BridgeIDBIndex implements IDBIndex {
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, getReq);
+ const result = await this._backend.getIndexRecords(btx, getReq);
if (result.count == 0) {
return undefined;
}
@@ -1351,7 +1386,7 @@ export class BridgeIDBIndex implements IDBIndex {
count = -1;
}
- const getReq: RecordGetRequest = {
+ const getReq: IndexGetQuery = {
direction: "next",
indexName: this._name,
limit: count,
@@ -1362,7 +1397,7 @@ export class BridgeIDBIndex implements IDBIndex {
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, getReq);
+ const result = await this._backend.getIndexRecords(btx, getReq);
const primaryKeys = result.primaryKeys;
if (!primaryKeys) {
throw Error("invariant violated");
@@ -1388,7 +1423,7 @@ export class BridgeIDBIndex implements IDBIndex {
key = BridgeIDBKeyRange.only(valueToKey(key));
}
- const getReq: RecordGetRequest = {
+ const getReq: IndexGetQuery = {
direction: "next",
indexName: this._name,
limit: 1,
@@ -1399,7 +1434,7 @@ export class BridgeIDBIndex implements IDBIndex {
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, getReq);
+ const result = await this._backend.getIndexRecords(btx, getReq);
return result.count;
};
@@ -1415,7 +1450,6 @@ export class BridgeIDBIndex implements IDBIndex {
}
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#range-concept
-/** @public */
export class BridgeIDBKeyRange {
public static only(value: IDBValidKey) {
if (arguments.length === 0) {
@@ -1525,10 +1559,8 @@ export class BridgeIDBKeyRange {
}
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#object-store
-/** @public */
export class BridgeIDBObjectStore implements IDBObjectStore {
- _indexesCache: Map<string, BridgeIDBIndex> = new Map();
-
+ _indexHandlesCache: Map<string, BridgeIDBIndex> = new Map();
_transaction: BridgeIDBTransaction;
/**
@@ -1536,41 +1568,43 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
*/
_debugName: string | undefined = undefined;
+ // Was the object store (not the handle, but the underlying store)
+ // created in this upgrade transaction?
_justCreated: boolean = false;
+ _originalName: string | undefined = undefined;
+ _objectStoreMeta: ObjectStoreMeta;
+
get transaction(): IDBTransaction {
return this._transaction;
}
get autoIncrement(): boolean {
- return this._schema.objectStores[this._name].autoIncrement;
- }
-
- get _indexNames(): FakeDOMStringList {
- return fakeDOMStringList(
- Object.keys(this._schema.objectStores[this._name].indexes),
- ).sort();
+ return this._objectStoreMeta.autoIncrement;
}
get indexNames(): DOMStringList {
- return this._indexNames as DOMStringList;
+ return fakeDOMStringList([...this._objectStoreMeta.indexSet]);
}
get keyPath(): IDBKeyPath | IDBKeyPath[] {
- return this._schema.objectStores[this._name].keyPath!;
+    // Bug in the official type declarations. The spec
+ // allows returning null here.
+ return this._objectStoreMeta.keyPath!;
}
_name: string;
- get _schema(): Schema {
- return this._transaction._db._schema;
- }
-
_deleted: boolean = false;
- constructor(transaction: BridgeIDBTransaction, name: string) {
+ constructor(
+ transaction: BridgeIDBTransaction,
+ name: string,
+ objectStoreMeta: ObjectStoreMeta,
+ ) {
this._name = name;
this._transaction = transaction;
+ this._objectStoreMeta = objectStoreMeta;
}
get name() {
@@ -1620,26 +1654,56 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
let { btx } = this._confirmStartedBackendTransaction();
newName = String(newName);
-
const oldName = this._name;
-
if (newName === oldName) {
return;
}
-
+ if (this._originalName == null) {
+ this._originalName = this._name;
+ }
this._backend.renameObjectStore(btx, oldName, newName);
- this._transaction._db._schema =
- this._backend.getCurrentTransactionSchema(btx);
+ this._applyNameChange(oldName, newName);
+ }
+ _applyNameChange(oldName: string, newName: string) {
+ this._transaction._scope.delete(oldName);
+ this._transaction._scope.add(newName);
// We don't modify scope, as the scope of the transaction
// doesn't matter if we're in an upgrade transaction.
this._transaction._objectStoresCache.delete(oldName);
this._transaction._objectStoresCache.set(newName, this);
this._transaction._cachedObjectStoreNames = undefined;
-
+ const objectStoreSet = this._transaction._db._objectStoreSet;
+ const oldIdx = objectStoreSet.indexOf(oldName);
+ objectStoreSet[oldIdx] = newName;
+ objectStoreSet.sort();
this._name = newName;
}
+ _applyDelete() {
+ this._deleted = true;
+ this._transaction._objectStoresCache.delete(this._name);
+ this._transaction._cachedObjectStoreNames = undefined;
+ const objectStoreSet = this._transaction._db._objectStoreSet;
+ const oldIdx = objectStoreSet.indexOf(this._name);
+ objectStoreSet.splice(oldIdx, 1);
+ }
+
+ /**
+ * Roll back changes to the handle after an abort.
+ */
+ _abort() {
+ if (this._originalName != null) {
+ this._applyNameChange(this._name, this._originalName);
+ }
+ if (this._justCreated) {
+ this._applyDelete();
+ }
+ }
+
+ /**
+ * "To add or put with handle, value, key, and no-overwrite flag, run these steps:"
+ */
public _store(value: any, key: IDBValidKey | undefined, overwrite: boolean) {
if (BridgeIDBFactory.enableTracing) {
console.log(
@@ -1647,6 +1711,12 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
);
}
+ if (this._deleted) {
+ throw new InvalidStateError(
+ "tried to call 'put' on a deleted object store",
+ );
+ }
+
if (!this._transaction._active) {
throw new TransactionInactiveError();
}
@@ -1655,14 +1725,21 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
throw new ReadOnlyError();
}
- const { keyPath, autoIncrement } = this._schema.objectStores[this._name];
+ const { keyPath, autoIncrement } = this._objectStoreMeta;
if (key !== null && key !== undefined) {
valueToKey(key);
}
// We only call this to synchronously verify the request.
- makeStoreKeyValue(value, key, 1, autoIncrement, keyPath);
+ // FIXME: The backend should do that!
+ makeStoreKeyValue({
+ value: value,
+ key: key,
+ currentKeyGenerator: 1,
+ autoIncrement,
+ keyPath,
+ });
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
@@ -1684,11 +1761,6 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
if (arguments.length === 0) {
throw new TypeError();
}
- if (this._deleted) {
- throw new InvalidStateError(
- "tried to call 'put' on a deleted object store",
- );
- }
return this._store(value, key, true);
}
@@ -1696,9 +1768,6 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
if (arguments.length === 0) {
throw new TypeError();
}
- if (!this._schema.objectStores[this._name]) {
- throw new InvalidStateError("object store does not exist");
- }
return this._store(value, key, false);
}
@@ -1767,10 +1836,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
}
}
- const recordRequest: RecordGetRequest = {
+ const recordRequest: ObjectStoreGetQuery = {
objectStoreName: this._name,
- indexName: undefined,
- lastIndexPosition: undefined,
lastObjectStorePosition: undefined,
direction: "next",
limit: 1,
@@ -1783,7 +1850,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
console.log("running get operation:", recordRequest);
}
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, recordRequest);
+ const result = await this._backend.getObjectStoreRecords(
+ btx,
+ recordRequest,
+ );
if (BridgeIDBFactory.enableTracing) {
console.log("get operation result count:", result.count);
@@ -1833,10 +1903,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
let keyRange: BridgeIDBKeyRange | null = simplifyRange(query);
- const recordRequest: RecordGetRequest = {
+ const recordRequest: ObjectStoreGetQuery = {
objectStoreName: this._name,
- indexName: undefined,
- lastIndexPosition: undefined,
lastObjectStorePosition: undefined,
direction: "next",
limit: count,
@@ -1849,7 +1917,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
console.log("running getAll operation:", recordRequest);
}
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, recordRequest);
+ const result = await this._backend.getObjectStoreRecords(
+ btx,
+ recordRequest,
+ );
if (BridgeIDBFactory.enableTracing) {
console.log("get operation result count:", result.count);
@@ -1887,10 +1958,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
let keyRange: BridgeIDBKeyRange | null = simplifyRange(query);
- const recordRequest: RecordGetRequest = {
+ const recordRequest: ObjectStoreGetQuery = {
objectStoreName: this._name,
- indexName: undefined,
- lastIndexPosition: undefined,
lastObjectStorePosition: undefined,
direction: "next",
limit: 1,
@@ -1903,7 +1972,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
console.log("running getKey operation:", recordRequest);
}
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, recordRequest);
+ const result = await this._backend.getObjectStoreRecords(
+ btx,
+ recordRequest,
+ );
if (BridgeIDBFactory.enableTracing) {
console.log("getKey operation result count:", result.count);
@@ -1965,10 +2037,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
}
}
- const recordRequest: RecordGetRequest = {
+ const recordRequest: ObjectStoreGetQuery = {
objectStoreName: this._name,
- indexName: undefined,
- lastIndexPosition: undefined,
lastObjectStorePosition: undefined,
direction: "next",
limit: count,
@@ -1978,7 +2048,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, recordRequest);
+ const result = await this._backend.getObjectStoreRecords(
+ btx,
+ recordRequest,
+ );
const primaryKeys = result.primaryKeys;
if (!primaryKeys) {
@@ -2121,7 +2194,7 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
throw new InvalidStateError();
}
- if (this._indexNames.indexOf(indexName) >= 0) {
+ if (this._objectStoreMeta.indexSet.indexOf(indexName) >= 0) {
throw new ConstraintError();
}
@@ -2140,6 +2213,9 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
unique,
);
+ this._objectStoreMeta.indexSet.push(indexName);
+ this._objectStoreMeta.indexSet.sort();
+
const idx = this.index(indexName);
idx._justCreated = true;
return idx;
@@ -2154,13 +2230,20 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
if (this._transaction._finished) {
throw new InvalidStateError();
}
-
- const index = this._indexesCache.get(name);
+ const index = this._indexHandlesCache.get(name);
if (index !== undefined) {
return index;
}
- const newIndex = new BridgeIDBIndex(this, name);
- this._indexesCache.set(name, newIndex);
+ const indexMeta = this._backend.getIndexMeta(
+ this._backendConnection,
+ this._name,
+ name,
+ );
+ if (!indexMeta) {
+ throw new NotFoundError();
+ }
+ const newIndex = new BridgeIDBIndex(this, name, indexMeta);
+ this._indexHandlesCache.set(name, newIndex);
this._transaction._usedIndexes.push(newIndex);
return newIndex;
}
@@ -2180,12 +2263,15 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
const { btx } = this._confirmStartedBackendTransaction();
- const index = this._indexesCache.get(indexName);
+ const index = this._indexHandlesCache.get(indexName);
if (index !== undefined) {
index._deleted = true;
- this._indexesCache.delete(indexName);
+ this._indexHandlesCache.delete(indexName);
}
+ const indexIdx = this._objectStoreMeta.indexSet.indexOf(indexName);
+ this._objectStoreMeta.indexSet.splice(indexIdx, 1);
+
this._backend.deleteIndex(btx, this._name, indexName);
}
@@ -2198,11 +2284,9 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
key = BridgeIDBKeyRange.only(valueToKey(key));
}
- const recordGetRequest: RecordGetRequest = {
+ const recordGetRequest: ObjectStoreGetQuery = {
direction: "next",
- indexName: undefined,
- lastIndexPosition: undefined,
- limit: -1,
+ limit: 0,
objectStoreName: this._name,
lastObjectStorePosition: undefined,
range: key,
@@ -2211,7 +2295,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
const operation = async () => {
const { btx } = this._confirmStartedBackendTransaction();
- const result = await this._backend.getRecords(btx, recordGetRequest);
+ const result = await this._backend.getObjectStoreRecords(
+ btx,
+ recordGetRequest,
+ );
return result.count;
};
@@ -2223,7 +2310,6 @@ export class BridgeIDBObjectStore implements IDBObjectStore {
}
}
-/** @public */
export class BridgeIDBRequest extends FakeEventTarget implements IDBRequest {
_result: any = null;
_error: Error | null | undefined = null;
@@ -2294,7 +2380,6 @@ export class BridgeIDBRequest extends FakeEventTarget implements IDBRequest {
}
}
-/** @public */
export class BridgeIDBOpenDBRequest
extends BridgeIDBRequest
implements IDBOpenDBRequest
@@ -2343,7 +2428,6 @@ function waitMacroQueue(): Promise<void> {
}
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#transaction
-/** @public */
export class BridgeIDBTransaction
extends FakeEventTarget
implements IDBTransaction
@@ -2390,13 +2474,9 @@ export class BridgeIDBTransaction
get objectStoreNames(): DOMStringList {
if (!this._cachedObjectStoreNames) {
- if (this._openRequest) {
- this._cachedObjectStoreNames = this._db.objectStoreNames;
- } else {
- this._cachedObjectStoreNames = fakeDOMStringList(
- Array.from(this._scope).sort(),
- );
- }
+ this._cachedObjectStoreNames = fakeDOMStringList(
+ Array.from(this._scope).sort(),
+ );
}
return this._cachedObjectStoreNames;
}
@@ -2496,41 +2576,34 @@ export class BridgeIDBTransaction
}
}
+ // All steps before happened synchronously. Now
+ // we asynchronously roll back the backend transaction,
+ // if necessary/possible.
+
+ const maybeBtx = this._backendTransaction;
+ if (maybeBtx) {
+ this._backend.rollback(maybeBtx);
+ }
+
// "Any object stores and indexes which were created during the
// transaction are now considered deleted for the purposes of other
// algorithms."
if (this._db._upgradeTransaction) {
for (const os of this._usedObjectStores) {
- if (os._justCreated) {
- os._deleted = true;
- }
+ os._abort();
}
for (const ind of this._usedIndexes) {
- if (ind._justCreated) {
- ind._deleted = true;
- }
+ ind._abort();
}
}
+ this._db._version = this._db._initialVersion;
+
// ("abort a transaction", step 5.1)
if (this._openRequest) {
this._db._upgradeTransaction = null;
}
- // All steps before happened synchronously. Now
- // we asynchronously roll back the backend transaction,
- // if necessary/possible.
-
- const maybeBtx = this._backendTransaction;
- if (maybeBtx) {
- this._db._schema = this._backend.getInitialTransactionSchema(maybeBtx);
- // Only roll back if we actually executed the scheduled operations.
- await this._backend.rollback(maybeBtx);
- this._backendTransaction = undefined;
- } else {
- this._db._schema = this._backend.getSchema(this._db._backendConnection);
- }
-
queueTask(() => {
const event = new FakeEvent("abort", {
bubbles: true,
@@ -2560,22 +2633,29 @@ export class BridgeIDBTransaction
throw new TransactionInactiveError();
}
- if (!this._db._schema.objectStores[name]) {
+ if (!this._scope.has(name)) {
throw new NotFoundError();
}
- if (!this._db._upgradeTransaction) {
- if (!this._scope.has(name)) {
- throw new NotFoundError();
- }
- }
-
const objectStore = this._objectStoresCache.get(name);
if (objectStore !== undefined) {
return objectStore;
}
- const newObjectStore = new BridgeIDBObjectStore(this, name);
+ const objectStoreMeta = this._backend.getObjectStoreMeta(
+ this._db._backendConnection,
+ name,
+ );
+
+ if (!objectStoreMeta) {
+ throw new NotFoundError();
+ }
+
+ const newObjectStore = new BridgeIDBObjectStore(
+ this,
+ name,
+ objectStoreMeta,
+ );
this._objectStoresCache.set(name, newObjectStore);
this._usedObjectStores.push(newObjectStore);
return newObjectStore;
diff --git a/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts b/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts
index bbbcf9b94..14d4f7d6e 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT test abort-in-initial-upgradeneeded.htm", async (t) => {
await new Promise<void>((resolve, reject) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts b/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts
index 723a0abb5..1a730df0b 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// When db.close is called in upgradeneeded, the db is cleaned up on refresh
test("WPT test close-in-upgradeneeded.htm", (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts b/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts
index db2cdbca8..795d515ed 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts
@@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { IDBRequest } from "../idbtypes.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
const IDBKeyRange = BridgeIDBKeyRange;
diff --git a/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts b/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts
index acc2a7578..e57b48f76 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts
@@ -2,10 +2,13 @@ import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js";
import {
indexeddb_test,
+ initTestIndexedDB,
is_transaction_active,
keep_alive,
} from "./wptsupport.js";
+test.before("test DB initialization", initTestIndexedDB);
+
test("WPT test abort-in-initial-upgradeneeded.htm (subtest 1)", async (t) => {
// Transactions are active during success handlers
await indexeddb_test(
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts
index 108e7c91c..1bf5ca697 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts
@@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBCursor,BridgeIDBRequest } from "../bridge-idb.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT test idbcursor_advance_index.htm", async (t) => {
await new Promise<void>((resolve, reject) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts
index f8b3a0f01..3cea3e86d 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts
@@ -1,6 +1,9 @@
import test from "ava";
import { BridgeIDBCursor, BridgeIDBCursorWithValue } from "../bridge-idb.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+import { IDBDatabase } from "../idbtypes.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT test idbcursor_continue_index.htm", (t) => {
return new Promise((resolve, reject) => {
@@ -209,7 +212,7 @@ test("WPT idbcursor-continue-index4.htm", (t) => {
// IDBCursor.continue() - index - iterate using 'prevunique'
test("WPT idbcursor-continue-index5.htm", (t) => {
return new Promise((resolve, reject) => {
- var db: any;
+ var db: IDBDatabase;
const records = [
{ pKey: "primaryKey_0", iKey: "indexKey_0" },
{ pKey: "primaryKey_1", iKey: "indexKey_1" },
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts
index e3169195f..d8b6f2b31 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts
@@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.continue() - object store - iterate to the next record
test("WPT test idbcursor_continue_objectstore.htm", (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts
index f771d19a2..e159129da 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { indexeddb_test } from "./wptsupport.js";
+import { indexeddb_test, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT idbcursor-delete-exception-order.htm", async (t) => {
// 'IDBCursor.delete exception order: TransactionInactiveError vs. ReadOnlyError'
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts
index 0232cf247..d34c9c3f9 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts
@@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js";
import { IDBCursor } from "../idbtypes.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.delete() - index - remove a record from the object store
test("WPT idbcursor-delete-index.htm", (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts
index 9410ca79e..2b9993b19 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts
@@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.delete() - object store - remove a record from the object store
test("WPT idbcursor-delete-objectstore.htm", (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts
index 54745802e..b13bd1fc3 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT idbcursor-reused.htm", async (t) => {
await new Promise<void>((resolve, reject) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts
index 81a7cd753..8a878b35a 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts
@@ -3,10 +3,13 @@ import { BridgeIDBCursor, BridgeIDBKeyRange } from "../bridge-idb.js";
import {
createDatabase,
createdb,
+ initTestIndexedDB,
promiseForRequest,
promiseForTransaction,
} from "./wptsupport.js";
+test.before("test DB initialization", initTestIndexedDB);
+
// IDBCursor.update() - index - modify a record in the object store
test("WPT test idbcursor_update_index.htm", (t) => {
return new Promise((resolve, reject) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts
index a6cb97612..450bec7be 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts
@@ -1,8 +1,10 @@
import test from "ava";
-import { idbFactory } from "./wptsupport.js";
+import { initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT idbfactory-cmp*.html", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
var greater = indexedDB.cmp(2, 1);
var equal = indexedDB.cmp(2, 2);
var less = indexedDB.cmp(1, 2);
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
index 02618f171..b8046fc1b 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts
@@ -1,7 +1,10 @@
import test from "ava";
import { BridgeIDBVersionChangeEvent } from "../bridge-idb.js";
import FakeEvent from "../util/FakeEvent.js";
-import { createdb, format_value, idbFactory } from "./wptsupport.js";
+import { createdb, format_value, initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js";
+import { IDBDatabase } from "../idbtypes.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBFactory.open() - request has no source
test("WPT idbfactory-open.htm", async (t) => {
@@ -36,7 +39,7 @@ test("WPT idbfactory-open2.htm", async (t) => {
// IDBFactory.open() - no version opens current database
test("WPT idbfactory-open3.htm", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var did_upgrade = false;
@@ -61,7 +64,6 @@ test("WPT idbfactory-open3.htm", async (t) => {
// IDBFactory.open() - new database has default version
test("WPT idbfactory-open4.htm", async (t) => {
- const indexedDB = idbFactory;
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, t.title + "-database_name");
@@ -78,7 +80,6 @@ test("WPT idbfactory-open4.htm", async (t) => {
// IDBFactory.open() - new database is empty
test("WPT idbfactory-open5.htm", async (t) => {
- const indexedDB = idbFactory;
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, t.title + "-database_name");
@@ -97,7 +98,7 @@ test("WPT idbfactory-open5.htm", async (t) => {
// IDBFactory.open() - open database with a lower version than current
test("WPT idbfactory-open6.htm", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var open_rq2: any;
@@ -131,7 +132,7 @@ test("WPT idbfactory-open6.htm", async (t) => {
// IDBFactory.open() - open database with a higher version than current
test("WPT idbfactory-open7.htm", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var did_upgrade = false;
@@ -169,7 +170,7 @@ test("WPT idbfactory-open7.htm", async (t) => {
// IDBFactory.open() - error in version change transaction aborts open
test("WPT idbfactory-open8.htm", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var did_upgrade = false;
@@ -193,7 +194,7 @@ test("WPT idbfactory-open8.htm", async (t) => {
// IDBFactory.open() - errors in version argument
test("WPT idbfactory-open9.htm", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
function should_throw(val: any, name?: string) {
if (!name) {
name = typeof val == "object" && val ? "object" : format_value(val);
@@ -281,9 +282,9 @@ test("WPT idbfactory-open9.htm", async (t) => {
// IDBFactory.open() - error in version change transaction aborts open
test("WPT idbfactory-open10.htm", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
- var db: any, db2: any;
+ var db: IDBDatabase, db2: IDBDatabase;
var open_rq = createdb(t, undefined, 9);
open_rq.onupgradeneeded = function (e: any) {
@@ -350,7 +351,7 @@ test("WPT idbfactory-open10.htm", async (t) => {
var open_rq3 = indexedDB.open(db.name);
open_rq3.onsuccess = function (e: any) {
- var db3 = e.target.result;
+ var db3: IDBDatabase = e.target.result;
t.true(
db3.objectStoreNames.contains("store"),
@@ -407,7 +408,7 @@ test("WPT idbfactory-open10.htm", async (t) => {
// IDBFactory.open() - second open's transaction is available to get objectStores
test("WPT idbfactory-open11.htm", async (t) => {
- const indexedDB = idbFactory;
+ const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var db: any;
var count_done = 0;
@@ -472,8 +473,6 @@ test("WPT idbfactory-open11.htm", async (t) => {
// IDBFactory.open() - upgradeneeded gets VersionChangeEvent
test("WPT idbfactory-open12.htm", async (t) => {
- const indexedDB = idbFactory;
-
var db: any;
var open_rq = createdb(t, undefined, 9);
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts
index d3b6e844e..ad8a57305 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts
@@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBIndex.get() - returns the record
test("WPT idbindex_get.htm", async (t) => {
@@ -93,7 +95,7 @@ test("WPT idbindex_get3.htm", async (t) => {
// IDBIndex.get() - returns the record with the first key in the range
test("WPT idbindex_get4.htm", async (t) => {
await new Promise<void>((resolve, reject) => {
- var db: any;
+ var db: IDBDatabase;
var open_rq = createdb(t);
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts
index 765bcf06a..5d61e68e5 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBIndex.openCursor() - throw InvalidStateError when the index is deleted
test("WPT test idbindex-openCursor.htm", (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts
index 901eda89c..60bf0cfb2 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts
@@ -1,5 +1,7 @@
import test, { ExecutionContext } from "ava";
-import { indexeddb_test } from "./wptsupport.js";
+import { indexeddb_test, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
async function t1(t: ExecutionContext, method: string): Promise<void> {
await indexeddb_test(
@@ -55,8 +57,6 @@ async function t2(t: ExecutionContext, method: string): Promise<void> {
done();
}, 0);
-
- console.log(`queued task for ${method}`);
},
"t2",
);
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts
index e8bc17471..4941c43d6 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts
@@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.add() - add with an inline key
test("WPT idbobjectstore_add.htm", async (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts
index 79064d19d..922c2bcf4 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts
@@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.get() - key is a number
test("WPT idbobjectstore_get.htm", (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts
index 152e3a9c1..f051c57b6 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts
@@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.put() - put with an inline key
test("WPT idbobjectstore_put.htm", (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts
index a8aab828a..6f04552fa 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts
@@ -6,9 +6,12 @@ import {
createBooksStore,
createDatabase,
createNotBooksStore,
+ initTestIndexedDB,
migrateDatabase,
} from "./wptsupport.js";
+test.before("test DB initialization", initTestIndexedDB);
+
// IndexedDB: object store renaming support
// IndexedDB object store rename in new transaction
test("WPT idbobjectstore-rename-store.html (subtest 1)", async (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts
index a501ff2c9..f728cd487 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// IDBTransaction - complete event
test("WPT idbtransaction-oncomplete.htm", async (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts b/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts
index 7ef1301f7..f15f93873 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { assert_key_equals, createdb } from "./wptsupport.js";
+import { assert_key_equals, createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT test keypath.htm", async (t) => {
function keypath(
@@ -9,8 +11,6 @@ test("WPT test keypath.htm", async (t) => {
desc?: string,
) {
return new Promise<void>((resolve, reject) => {
- console.log("key path", keypath);
- console.log("checking", desc);
let db: any;
const store_name = "store-" + Date.now() + Math.random();
diff --git a/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts b/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts
index 526c06784..14c8f3be5 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts
@@ -1,6 +1,8 @@
import test from "ava";
import { EventTarget } from "../idbtypes.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// Bubbling and capturing of request events
test("WPT request_bubble-and-capture.htm", async (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts b/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts
index 9d76e79f2..971330e3d 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts
@@ -1,5 +1,7 @@
import test from "ava";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
// Transactions have a request queue
test("transaction-requestqueue.htm", async (t) => {
diff --git a/packages/idb-bridge/src/idb-wpt-ported/value.test.ts b/packages/idb-bridge/src/idb-wpt-ported/value.test.ts
index a80ec2b5a..95712e152 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/value.test.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/value.test.ts
@@ -1,6 +1,8 @@
import test from "ava";
import { IDBVersionChangeEvent } from "../idbtypes.js";
-import { createdb } from "./wptsupport.js";
+import { createdb, initTestIndexedDB } from "./wptsupport.js";
+
+test.before("test DB initialization", initTestIndexedDB);
test("WPT test value.htm, array", (t) => {
return new Promise((resolve, reject) => {
@@ -12,7 +14,6 @@ test("WPT test value.htm, array", (t) => {
createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) {
(e.target as any).result.createObjectStore("store").add(value, 1);
(e.target as any).onsuccess = (e: any) => {
- console.log("in first onsuccess");
e.target.result
.transaction("store")
.objectStore("store")
@@ -35,13 +36,10 @@ test("WPT test value.htm, date", (t) => {
createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) {
(e.target as any).result.createObjectStore("store").add(value, 1);
(e.target as any).onsuccess = (e: any) => {
- console.log("in first onsuccess");
e.target.result
.transaction("store")
.objectStore("store")
.get(1).onsuccess = (e: any) => {
- console.log("target", e.target);
- console.log("result", e.target.result);
t.assert(e.target.result instanceof _instanceof, "instanceof");
resolve();
};
diff --git a/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts b/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts
index 7f68a53e8..c648bf53f 100644
--- a/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts
+++ b/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts
@@ -1,5 +1,5 @@
import { ExecutionContext } from "ava";
-import { BridgeIDBFactory, BridgeIDBRequest } from "../bridge-idb.js";
+import { BridgeIDBRequest } from "../bridge-idb.js";
import {
IDBDatabase,
IDBIndex,
@@ -8,17 +8,10 @@ import {
IDBRequest,
IDBTransaction,
} from "../idbtypes.js";
-import { MemoryBackend } from "../MemoryBackend.js";
+import { initTestIndexedDB, useTestIndexedDb } from "../testingdb.js";
import { compareKeys } from "../util/cmp.js";
-BridgeIDBFactory.enableTracing = true;
-const backend = new MemoryBackend();
-backend.enableTracing = true;
-export const idbFactory = new BridgeIDBFactory(backend);
-
-const self = {
- indexedDB: idbFactory,
-};
+export { initTestIndexedDB, useTestIndexedDb } from "../testingdb.js";
export function createdb(
t: ExecutionContext<unknown>,
@@ -27,8 +20,8 @@ export function createdb(
): IDBOpenDBRequest {
var rq_open: IDBOpenDBRequest;
dbname = dbname ? dbname : "testdb-" + new Date().getTime() + Math.random();
- if (version) rq_open = self.indexedDB.open(dbname, version);
- else rq_open = self.indexedDB.open(dbname);
+ if (version) rq_open = useTestIndexedDb().open(dbname, version);
+ else rq_open = useTestIndexedDb().open(dbname);
return rq_open;
}
@@ -111,7 +104,7 @@ export async function migrateNamedDatabase(
migrationCallback: MigrationCallback,
): Promise<IDBDatabase> {
return new Promise<IDBDatabase>((resolve, reject) => {
- const request = self.indexedDB.open(databaseName, newVersion);
+ const request = useTestIndexedDb().open(databaseName, newVersion);
request.onupgradeneeded = (event: any) => {
const database = event.target.result;
const transaction = event.target.transaction;
@@ -175,7 +168,7 @@ export async function createDatabase(
setupCallback: MigrationCallback,
): Promise<IDBDatabase> {
const databaseName = makeDatabaseName(t.title);
- const request = self.indexedDB.deleteDatabase(databaseName);
+ const request = useTestIndexedDb().deleteDatabase(databaseName);
return migrateNamedDatabase(t, databaseName, 1, setupCallback);
}
@@ -463,9 +456,9 @@ export function indexeddb_test(
options = Object.assign({ upgrade_will_abort: false }, options);
const dbname =
"testdb-" + new Date().getTime() + Math.random() + (dbsuffix ?? "");
- var del = self.indexedDB.deleteDatabase(dbname);
+ var del = useTestIndexedDb().deleteDatabase(dbname);
del.onerror = () => t.fail("deleteDatabase should succeed");
- var open = self.indexedDB.open(dbname, 1);
+ var open = useTestIndexedDb().open(dbname, 1);
open.onupgradeneeded = function () {
var db = open.result;
t.teardown(function () {
@@ -474,7 +467,7 @@ export function indexeddb_test(
e.preventDefault();
};
db.close();
- self.indexedDB.deleteDatabase(db.name);
+ useTestIndexedDb().deleteDatabase(db.name);
});
var tx = open.transaction!;
upgrade_func(resolve, db, tx, open);
diff --git a/packages/idb-bridge/src/idbpromutil.ts b/packages/idb-bridge/src/idbpromutil.ts
new file mode 100644
index 000000000..e711db027
--- /dev/null
+++ b/packages/idb-bridge/src/idbpromutil.ts
@@ -0,0 +1,26 @@
+import { BridgeIDBTransaction } from "./bridge-idb.js";
+import { IDBRequest } from "./idbtypes.js";
+
+export function promiseFromRequest(request: IDBRequest): Promise<any> {
+ return new Promise((resolve, reject) => {
+ request.onsuccess = () => {
+ resolve(request.result);
+ };
+ request.onerror = () => {
+ reject(request.error);
+ };
+ });
+}
+
+export function promiseFromTransaction(
+ transaction: BridgeIDBTransaction,
+): Promise<void> {
+ return new Promise<void>((resolve, reject) => {
+ transaction.oncomplete = () => {
+ resolve();
+ };
+ transaction.onerror = () => {
+ reject();
+ };
+ });
+} \ No newline at end of file
diff --git a/packages/idb-bridge/src/idbtypes.ts b/packages/idb-bridge/src/idbtypes.ts
index a7878c38f..9ee93e050 100644
--- a/packages/idb-bridge/src/idbtypes.ts
+++ b/packages/idb-bridge/src/idbtypes.ts
@@ -19,48 +19,27 @@ and limitations under the License.
* Instead of ambient types, we export type declarations.
*/
-/**
- * @public
- */
export type IDBKeyPath = string;
-/**
- * @public
- */
export interface EventListener {
(evt: Event): void;
}
-/**
- * @public
- */
export interface EventListenerObject {
handleEvent(evt: Event): void;
}
-/**
- * @public
- */
export interface EventListenerOptions {
capture?: boolean;
}
-/**
- * @public
- */
export interface AddEventListenerOptions extends EventListenerOptions {
once?: boolean;
passive?: boolean;
}
-/**
- * @public
- */
export type IDBTransactionMode = "readonly" | "readwrite" | "versionchange";
-/**
- * @public
- */
export type EventListenerOrEventListenerObject =
| EventListener
| EventListenerObject;
@@ -68,8 +47,6 @@ export type EventListenerOrEventListenerObject =
/**
* EventTarget is a DOM interface implemented by objects that can receive
* events and may have listeners for them.
- *
- * @public
*/
export interface EventTarget {
/**
diff --git a/packages/idb-bridge/src/index.ts b/packages/idb-bridge/src/index.ts
index fc99b2ccd..47ff80119 100644
--- a/packages/idb-bridge/src/index.ts
+++ b/packages/idb-bridge/src/index.ts
@@ -2,14 +2,10 @@ import {
Backend,
DatabaseConnection,
DatabaseTransaction,
- IndexProperties,
- ObjectStoreProperties,
- RecordGetRequest,
RecordGetResponse,
RecordStoreRequest,
RecordStoreResponse,
ResultLevel,
- Schema,
StoreLevel,
} from "./backend-interface.js";
import {
@@ -36,6 +32,9 @@ import {
} from "./MemoryBackend.js";
import { Listener } from "./util/FakeEventTarget.js";
+export * from "./SqliteBackend.js";
+export * from "./sqlite3-interface.js";
+
export * from "./idbtypes.js";
export { MemoryBackend } from "./MemoryBackend.js";
export type { AccessStats } from "./MemoryBackend.js";
@@ -55,21 +54,17 @@ export {
};
export type {
DatabaseTransaction,
- RecordGetRequest,
RecordGetResponse,
- Schema,
Backend,
DatabaseList,
RecordStoreRequest,
RecordStoreResponse,
DatabaseConnection,
- ObjectStoreProperties,
RequestObj,
DatabaseDump,
ObjectStoreDump,
IndexRecord,
ObjectStoreRecord,
- IndexProperties,
MemoryBackendDump,
Event,
Listener,
diff --git a/packages/idb-bridge/src/node-sqlite3-impl.ts b/packages/idb-bridge/src/node-sqlite3-impl.ts
new file mode 100644
index 000000000..fa38d298f
--- /dev/null
+++ b/packages/idb-bridge/src/node-sqlite3-impl.ts
@@ -0,0 +1,84 @@
+/*
+ This file is part of GNU Taler
+ (C) 2023 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
+ */
+
+// @ts-ignore: optional dependency
+import type Database from "better-sqlite3";
+import {
+ ResultRow,
+ Sqlite3Interface,
+ Sqlite3Statement,
+} from "./sqlite3-interface.js";
+
+export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
+ // @ts-ignore: optional dependency
+ const bsq = (await import("better-sqlite3")).default;
+
+ return {
+ open(filename: string) {
+ const internalDbHandle = bsq(filename);
+ return {
+ internalDbHandle,
+ close() {
+ internalDbHandle.close();
+ },
+ prepare(stmtStr): Sqlite3Statement {
+ const stmtHandle = internalDbHandle.prepare(stmtStr);
+ return {
+ internalStatement: stmtHandle,
+ getAll(params): ResultRow[] {
+ let res: ResultRow[];
+ if (params === undefined) {
+ res = stmtHandle.all() as ResultRow[];
+ } else {
+ res = stmtHandle.all(params) as ResultRow[];
+ }
+ return res;
+ },
+ getFirst(params): ResultRow | undefined {
+ let res: ResultRow | undefined;
+ if (params === undefined) {
+ res = stmtHandle.get() as ResultRow | undefined;
+ } else {
+ res = stmtHandle.get(params) as ResultRow | undefined;
+ }
+ return res;
+ },
+ run(params) {
+ const myParams = [];
+ if (params !== undefined) {
+ myParams.push(params);
+ }
+          // The better-sqlite3 library doesn't like it when we pass
+          // undefined directly.
+ let res: Database.RunResult;
+ if (params !== undefined) {
+ res = stmtHandle.run(params);
+ } else {
+ res = stmtHandle.run();
+ }
+ return {
+ lastInsertRowid: res.lastInsertRowid,
+ };
+ },
+ };
+ },
+ exec(sqlStr): void {
+ internalDbHandle.exec(sqlStr);
+ },
+ };
+ },
+ };
+}
diff --git a/packages/idb-bridge/src/sqlite3-interface.ts b/packages/idb-bridge/src/sqlite3-interface.ts
new file mode 100644
index 000000000..8668ef844
--- /dev/null
+++ b/packages/idb-bridge/src/sqlite3-interface.ts
@@ -0,0 +1,34 @@
+export type Sqlite3Database = {
+ internalDbHandle: any;
+ exec(sqlStr: string): void;
+ prepare(stmtStr: string): Sqlite3Statement;
+ close(): void;
+};
+export type Sqlite3Statement = {
+ internalStatement: any;
+
+ run(params?: BindParams): RunResult;
+ getAll(params?: BindParams): ResultRow[];
+ getFirst(params?: BindParams): ResultRow | undefined;
+};
+
+export interface RunResult {
+ lastInsertRowid: number | bigint;
+}
+
+export type Sqlite3Value = string | Uint8Array | number | null | bigint;
+
+export type BindParams = Record<string, Sqlite3Value | undefined>;
+export type ResultRow = Record<string, Sqlite3Value>;
+
+/**
+ * Common interface that multiple sqlite3 bindings
+ * (such as better-sqlite3 or qtart's sqlite3 bindings)
+ * can adapt to.
+ *
+ * This does not expose full sqlite3 functionality, but just enough
+ * to be used by our IndexedDB sqlite3 backend.
+ */
+export interface Sqlite3Interface {
+ open(filename: string): Sqlite3Database;
+}
diff --git a/packages/idb-bridge/src/testingdb.ts b/packages/idb-bridge/src/testingdb.ts
new file mode 100644
index 000000000..c6abffa0f
--- /dev/null
+++ b/packages/idb-bridge/src/testingdb.ts
@@ -0,0 +1,43 @@
+/*
+ Copyright 2023 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
+ */
+
+import { createSqliteBackend } from "./SqliteBackend.js";
+import { BridgeIDBFactory } from "./bridge-idb.js";
+import { IDBFactory } from "./idbtypes.js";
+import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
+
+let idbFactory: IDBFactory | undefined = undefined;
+
+export async function initTestIndexedDB(): Promise<void> {
+  // Tests run against the sqlite3 backend; swap in MemoryBackend
+  // here when debugging backend-specific failures.
+
+ const sqlite3Impl = await createNodeSqlite3Impl();
+
+ const backend = await createSqliteBackend(sqlite3Impl, {
+ filename: ":memory:",
+ });
+
+ idbFactory = new BridgeIDBFactory(backend);
+ backend.enableTracing = true;
+ BridgeIDBFactory.enableTracing = false;
+}
+
+export function useTestIndexedDb(): IDBFactory {
+ if (!idbFactory) {
+ throw Error("indexeddb factory not initialized");
+ }
+ return idbFactory;
+}
diff --git a/packages/idb-bridge/src/util/FakeDomEvent.ts b/packages/idb-bridge/src/util/FakeDomEvent.ts
new file mode 100644
index 000000000..b3ff298ec
--- /dev/null
+++ b/packages/idb-bridge/src/util/FakeDomEvent.ts
@@ -0,0 +1,103 @@
+/*
+ Copyright 2017 Jeremy Scheff
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ or implied. See the License for the specific language governing
+ permissions and limitations under the License.
+*/
+
+import FakeEventTarget from "./FakeEventTarget.js";
+import { Event, EventTarget } from "../idbtypes.js";
+
+/** @public */
+export type EventType =
+ | "abort"
+ | "blocked"
+ | "complete"
+ | "error"
+ | "success"
+ | "upgradeneeded"
+ | "versionchange";
+
+export class FakeDomEvent implements Event {
+ public eventPath: FakeEventTarget[] = [];
+ public type: EventType;
+
+ public readonly NONE = 0;
+ public readonly CAPTURING_PHASE = 1;
+ public readonly AT_TARGET = 2;
+ public readonly BUBBLING_PHASE = 3;
+
+ // Flags
+ public propagationStopped = false;
+ public immediatePropagationStopped = false;
+ public canceled = false;
+ public initialized = true;
+ public dispatched = false;
+
+ public target: FakeEventTarget | null = null;
+ public currentTarget: FakeEventTarget | null = null;
+
+ public eventPhase: 0 | 1 | 2 | 3 = 0;
+
+ public defaultPrevented = false;
+
+ public isTrusted = false;
+ public timeStamp = Date.now();
+
+ public bubbles: boolean;
+ public cancelable: boolean;
+
+ constructor(
+ type: EventType,
+ eventInitDict: { bubbles?: boolean; cancelable?: boolean } = {},
+ ) {
+ this.type = type;
+
+ this.bubbles =
+ eventInitDict.bubbles !== undefined ? eventInitDict.bubbles : false;
+ this.cancelable =
+ eventInitDict.cancelable !== undefined ? eventInitDict.cancelable : false;
+ }
+ cancelBubble: boolean = false;
+ composed: boolean = false;
+ returnValue: boolean = false;
+ get srcElement(): EventTarget | null {
+ return this.target;
+ }
+ composedPath(): EventTarget[] {
+ throw new Error("Method not implemented.");
+ }
+ initEvent(
+ type: string,
+ bubbles?: boolean | undefined,
+ cancelable?: boolean | undefined,
+ ): void {
+ throw new Error("Method not implemented.");
+ }
+
+ public preventDefault() {
+ if (this.cancelable) {
+ this.canceled = true;
+ }
+ }
+
+ public stopPropagation() {
+ this.propagationStopped = true;
+ }
+
+ public stopImmediatePropagation() {
+ this.propagationStopped = true;
+ this.immediatePropagationStopped = true;
+ }
+}
+
+export default FakeDomEvent;
diff --git a/packages/idb-bridge/src/util/FakeEventTarget.ts b/packages/idb-bridge/src/util/FakeEventTarget.ts
index 79f57cce3..839906a34 100644
--- a/packages/idb-bridge/src/util/FakeEventTarget.ts
+++ b/packages/idb-bridge/src/util/FakeEventTarget.ts
@@ -180,7 +180,7 @@ abstract class FakeEventTarget implements EventTarget {
fe.eventPath.reverse();
fe.eventPhase = event.BUBBLING_PHASE;
if (fe.eventPath.length === 0 && event.type === "error") {
- console.error("Unhandled error event: ", event.target);
+ console.error("Unhandled error event on target: ", event.target);
}
for (const obj of event.eventPath) {
if (!event.propagationStopped) {
diff --git a/packages/idb-bridge/src/util/extractKey.ts b/packages/idb-bridge/src/util/extractKey.ts
index 6a3d468ef..2a4ec45b9 100644
--- a/packages/idb-bridge/src/util/extractKey.ts
+++ b/packages/idb-bridge/src/util/extractKey.ts
@@ -19,7 +19,11 @@ import { IDBKeyPath, IDBValidKey } from "../idbtypes.js";
import { valueToKey } from "./valueToKey.js";
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#dfn-steps-for-extracting-a-key-from-a-value-using-a-key-path
+/**
+ * Algorithm to "extract a key from a value using a key path".
+ */
export const extractKey = (keyPath: IDBKeyPath | IDBKeyPath[], value: any) => {
+ //console.log(`extracting key ${JSON.stringify(keyPath)} from ${JSON.stringify(value)}`);
if (Array.isArray(keyPath)) {
const result: IDBValidKey[] = [];
diff --git a/packages/idb-bridge/src/util/key-storage.test.ts b/packages/idb-bridge/src/util/key-storage.test.ts
new file mode 100644
index 000000000..dc1e1827c
--- /dev/null
+++ b/packages/idb-bridge/src/util/key-storage.test.ts
@@ -0,0 +1,39 @@
+/*
+ This file is part of GNU Taler
+ (C) 2023 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
+ */
+
+import test, { ExecutionContext } from "ava";
+import { deserializeKey, serializeKey } from "./key-storage.js";
+import { IDBValidKey } from "../idbtypes.js";
+
+function checkKeySer(t: ExecutionContext, k: IDBValidKey): void {
+ const keyEnc = serializeKey(k);
+ const keyDec = deserializeKey(keyEnc);
+ t.deepEqual(k, keyDec);
+}
+
// Round-trip checks for the sortable binary key encoding, covering
// one-, two- and three-byte code-unit encodings, embedded NUL,
// numbers near the one-byte boundary, and nested arrays.
test("basics", (t) => {
  checkKeySer(t, "foo");
  checkKeySer(t, "foo\0bar");
  checkKeySer(t, "foo\u1000bar");
  checkKeySer(t, "foo\u2000bar");
  checkKeySer(t, "foo\u5000bar");
  checkKeySer(t, "foo\uffffbar");
  checkKeySer(t, 42);
  checkKeySer(t, 255);
  checkKeySer(t, 254);
  checkKeySer(t, [1, 2, 3, 4]);
  checkKeySer(t, [[[1], 3], [4]]);
  // NOTE(review): Date and binary (ArrayBuffer / typed-array) keys are
  // supported by the codec but not exercised here — worth adding cases.
});
diff --git a/packages/idb-bridge/src/util/key-storage.ts b/packages/idb-bridge/src/util/key-storage.ts
new file mode 100644
index 000000000..b71548dd3
--- /dev/null
+++ b/packages/idb-bridge/src/util/key-storage.ts
@@ -0,0 +1,363 @@
+/*
+ This file is part of GNU Taler
+ (C) 2023 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
+ */
+
+/*
+Encoding rules (inspired by Firefox, but slightly simplified):
+
+Numbers: 0xa0 n n n n n n n n
+Dates: 0xb0 n n n n n n n n
+Strings: 0xc0 s s s s ... 0
+Binaries: 0xd0 s s s s ... 0
+Arrays: 0xe0 i i i ... 0
+
+Numbers/dates are encoded as 64-bit IEEE 754 floats with the sign bit
+flipped, in order to make them sortable.
+*/
+
+/**
+ * Imports.
+ */
+import { IDBValidKey } from "../idbtypes.js";
+
+const tagNum = 0xa0;
+const tagDate = 0xb0;
+const tagString = 0xc0;
+const tagBinary = 0xc0;
+const tagArray = 0xe0;
+
+const oneByteOffset = 0x01;
+const twoByteOffset = 0x7f;
+const oneByteMax = 0x7e;
+const twoByteMax = 0x3fff + twoByteOffset;
+const twoByteMask = 0b1000_0000;
+const threeByteMask = 0b1100_0000;
+
+export function countEncSize(c: number): number {
+ if (c > twoByteMax) {
+ return 3;
+ }
+ if (c > oneByteMax) {
+ return 2;
+ }
+ return 1;
+}
+
+export function writeEnc(dv: DataView, offset: number, c: number): number {
+ if (c > twoByteMax) {
+ dv.setUint8(offset + 2, (c & 0xff) << 6);
+ dv.setUint8(offset + 1, (c >>> 2) & 0xff);
+ dv.setUint8(offset, threeByteMask | (c >>> 10));
+ return 3;
+ } else if (c > oneByteMax) {
+ c -= twoByteOffset;
+ dv.setUint8(offset + 1, c & 0xff);
+ dv.setUint8(offset, (c >>> 8) | twoByteMask);
+ return 2;
+ } else {
+ c += oneByteOffset;
+ dv.setUint8(offset, c);
+ return 1;
+ }
+}
+
+export function internalSerializeString(
+ dv: DataView,
+ offset: number,
+ key: string,
+): number {
+ dv.setUint8(offset, tagString);
+ let n = 1;
+ for (let i = 0; i < key.length; i++) {
+ let c = key.charCodeAt(i);
+ n += writeEnc(dv, offset + n, c);
+ }
+ // Null terminator
+ dv.setUint8(offset + n, 0);
+ n++;
+ return n;
+}
+
+export function countSerializeKey(key: IDBValidKey): number {
+ if (typeof key === "number") {
+ return 9;
+ }
+ if (key instanceof Date) {
+ return 9;
+ }
+ if (key instanceof ArrayBuffer) {
+ let len = 2;
+ const uv = new Uint8Array(key);
+ for (let i = 0; i < uv.length; i++) {
+ len += countEncSize(uv[i]);
+ }
+ return len;
+ }
+ if (ArrayBuffer.isView(key)) {
+ let len = 2;
+ const uv = new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
+ for (let i = 0; i < uv.length; i++) {
+ len += countEncSize(uv[i]);
+ }
+ return len;
+ }
+ if (typeof key === "string") {
+ let len = 2;
+ for (let i = 0; i < key.length; i++) {
+ len += countEncSize(key.charCodeAt(i));
+ }
+ return len;
+ }
+ if (Array.isArray(key)) {
+ let len = 2;
+ for (let i = 0; i < key.length; i++) {
+ len += countSerializeKey(key[i]);
+ }
+ return len;
+ }
+ throw Error("unsupported type for key");
+}
+
+function internalSerializeNumeric(
+ dv: DataView,
+ offset: number,
+ tag: number,
+ val: number,
+): number {
+ dv.setUint8(offset, tagNum);
+ dv.setFloat64(offset + 1, val);
+ // Flip sign bit
+ let b = dv.getUint8(offset + 1);
+ b ^= 0x80;
+ dv.setUint8(offset + 1, b);
+ return 9;
+}
+
+function internalSerializeArray(
+ dv: DataView,
+ offset: number,
+ key: any[],
+): number {
+ dv.setUint8(offset, tagArray);
+ let n = 1;
+ for (let i = 0; i < key.length; i++) {
+ n += internalSerializeKey(key[i], dv, offset + n);
+ }
+ dv.setUint8(offset + n, 0);
+ n++;
+ return n;
+}
+
+function internalSerializeBinary(
+ dv: DataView,
+ offset: number,
+ key: Uint8Array,
+): number {
+ dv.setUint8(offset, tagBinary);
+ let n = 1;
+ for (let i = 0; i < key.length; i++) {
+ n += internalSerializeKey(key[i], dv, offset + n);
+ }
+ dv.setUint8(offset + n, 0);
+ n++;
+ return n;
+}
+
+function internalSerializeKey(
+ key: IDBValidKey,
+ dv: DataView,
+ offset: number,
+): number {
+ if (typeof key === "number") {
+ return internalSerializeNumeric(dv, offset, tagNum, key);
+ }
+ if (key instanceof Date) {
+ return internalSerializeNumeric(dv, offset, tagDate, key.getDate());
+ }
+ if (typeof key === "string") {
+ return internalSerializeString(dv, offset, key);
+ }
+ if (Array.isArray(key)) {
+ return internalSerializeArray(dv, offset, key);
+ }
+ if (key instanceof ArrayBuffer) {
+ return internalSerializeBinary(dv, offset, new Uint8Array(key));
+ }
+ if (ArrayBuffer.isView(key)) {
+ const uv = new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
+ return internalSerializeBinary(dv, offset, uv);
+ }
+ throw Error("unsupported type for key");
+}
+
+export function serializeKey(key: IDBValidKey): Uint8Array {
+ const len = countSerializeKey(key);
+ let buf = new Uint8Array(len);
+ const outLen = internalSerializeKey(key, new DataView(buf.buffer), 0);
+ if (len != outLen) {
+ throw Error("internal invariant failed");
+ }
+ let numTrailingZeroes = 0;
+ for (let i = buf.length - 1; i >= 0 && buf[i] == 0; i--, numTrailingZeroes++);
+ if (numTrailingZeroes > 0) {
+ buf = buf.slice(0, buf.length - numTrailingZeroes);
+ }
+ return buf;
+}
+
+function internalReadString(dv: DataView, offset: number): [number, string] {
+ const chars: string[] = [];
+ while (offset < dv.byteLength) {
+ const v = dv.getUint8(offset);
+ if (v == 0) {
+ // Got end-of-string.
+ offset += 1;
+ break;
+ }
+ let c: number;
+ if ((v & threeByteMask) === threeByteMask) {
+ const b1 = v;
+ const b2 = dv.getUint8(offset + 1);
+ const b3 = dv.getUint8(offset + 2);
+ c = (b1 << 10) | (b2 << 2) | (b3 >> 6);
+ offset += 3;
+ } else if ((v & twoByteMask) === twoByteMask) {
+ const b1 = v & ~twoByteMask;
+ const b2 = dv.getUint8(offset + 1);
+ c = ((b1 << 8) | b2) + twoByteOffset;
+ offset += 2;
+ } else {
+ c = v - oneByteOffset;
+ offset += 1;
+ }
+ chars.push(String.fromCharCode(c));
+ }
+ return [offset, chars.join("")];
+}
+
+function internalReadBytes(dv: DataView, offset: number): [number, Uint8Array] {
+ let count = 0;
+ while (offset + count < dv.byteLength) {
+ const v = dv.getUint8(offset + count);
+ if (v === 0) {
+ break;
+ }
+ count++;
+ }
+ let writePos = 0;
+ const bytes = new Uint8Array(count);
+ while (offset < dv.byteLength) {
+ const v = dv.getUint8(offset);
+ if (v == 0) {
+ offset += 1;
+ break;
+ }
+ let c: number;
+ if ((v & threeByteMask) === threeByteMask) {
+ const b1 = v;
+ const b2 = dv.getUint8(offset + 1);
+ const b3 = dv.getUint8(offset + 2);
+ c = (b1 << 10) | (b2 << 2) | (b3 >> 6);
+ offset += 3;
+ } else if ((v & twoByteMask) === twoByteMask) {
+ const b1 = v & ~twoByteMask;
+ const b2 = dv.getUint8(offset + 1);
+ c = ((b1 << 8) | b2) + twoByteOffset;
+ offset += 2;
+ } else {
+ c = v - oneByteOffset;
+ offset += 1;
+ }
+ bytes[writePos] = c;
+ writePos++;
+ }
+ return [offset, bytes];
+}
+
+/**
+ * Same as DataView.getFloat64, but logically pad input
+ * with zeroes on the right if read offset would be out
+ * of bounds.
+ *
+ * This allows reading from buffers where zeros have been
+ * truncated.
+ */
+function getFloat64Trunc(dv: DataView, offset: number): number {
+ if (offset + 7 >= dv.byteLength) {
+ const buf = new Uint8Array(8);
+ for (let i = offset; i < dv.byteLength; i++) {
+ buf[i - offset] = dv.getUint8(i);
+ }
+ const dv2 = new DataView(buf.buffer);
+ return dv2.getFloat64(0);
+ } else {
+ return dv.getFloat64(offset);
+ }
+}
+
+function internalDeserializeKey(
+ dv: DataView,
+ offset: number,
+): [number, IDBValidKey] {
+ let tag = dv.getUint8(offset);
+ switch (tag) {
+ case tagNum: {
+ const num = -getFloat64Trunc(dv, offset + 1);
+ const newOffset = Math.min(offset + 9, dv.byteLength);
+ return [newOffset, num];
+ }
+ case tagDate: {
+ const num = -getFloat64Trunc(dv, offset + 1);
+ const newOffset = Math.min(offset + 9, dv.byteLength);
+ return [newOffset, new Date(num)];
+ }
+ case tagString: {
+ return internalReadString(dv, offset + 1);
+ }
+ case tagBinary: {
+ return internalReadBytes(dv, offset + 1);
+ }
+ case tagArray: {
+ const arr: any[] = [];
+ offset += 1;
+ while (offset < dv.byteLength) {
+ const innerTag = dv.getUint8(offset);
+ if (innerTag === 0) {
+ offset++;
+ break;
+ }
+ const [innerOff, innerVal] = internalDeserializeKey(dv, offset);
+ arr.push(innerVal);
+ offset = innerOff;
+ }
+ return [offset, arr];
+ }
+ default:
+ throw Error("invalid key (unrecognized tag)");
+ }
+}
+
+export function deserializeKey(encodedKey: Uint8Array): IDBValidKey {
+ const dv = new DataView(
+ encodedKey.buffer,
+ encodedKey.byteOffset,
+ encodedKey.byteLength,
+ );
+ let [off, res] = internalDeserializeKey(dv, 0);
+ if (off != encodedKey.byteLength) {
+ throw Error("internal invariant failed");
+ }
+ return res;
+}
diff --git a/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts b/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts
index 971697021..c1216fe97 100644
--- a/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts
+++ b/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts
@@ -20,55 +20,73 @@ import { makeStoreKeyValue } from "./makeStoreKeyValue.js";
test("basics", (t) => {
let result;
- result = makeStoreKeyValue({ name: "Florian" }, undefined, 42, true, "id");
+ result = makeStoreKeyValue({
+ value: { name: "Florian" },
+ key: undefined,
+ currentKeyGenerator: 42,
+ autoIncrement: true,
+ keyPath: "id",
+ });
t.is(result.updatedKeyGenerator, 43);
t.is(result.key, 42);
t.is(result.value.name, "Florian");
t.is(result.value.id, 42);
- result = makeStoreKeyValue(
- { name: "Florian", id: 10 },
- undefined,
- 5,
- true,
- "id",
- );
+ result = makeStoreKeyValue({
+ value: { name: "Florian", id: 10 },
+ key: undefined,
+ currentKeyGenerator: 5,
+ autoIncrement: true,
+ keyPath: "id",
+ });
t.is(result.updatedKeyGenerator, 11);
t.is(result.key, 10);
t.is(result.value.name, "Florian");
t.is(result.value.id, 10);
- result = makeStoreKeyValue(
- { name: "Florian", id: 5 },
- undefined,
- 10,
- true,
- "id",
- );
+ result = makeStoreKeyValue({
+ value: { name: "Florian", id: 5 },
+ key: undefined,
+ currentKeyGenerator: 10,
+ autoIncrement: true,
+ keyPath: "id",
+ });
t.is(result.updatedKeyGenerator, 10);
t.is(result.key, 5);
t.is(result.value.name, "Florian");
t.is(result.value.id, 5);
- result = makeStoreKeyValue(
- { name: "Florian", id: "foo" },
- undefined,
- 10,
- true,
- "id",
- );
+ result = makeStoreKeyValue({
+ value: { name: "Florian", id: "foo" },
+ key: undefined,
+ currentKeyGenerator: 10,
+ autoIncrement: true,
+ keyPath: "id",
+ });
t.is(result.updatedKeyGenerator, 10);
t.is(result.key, "foo");
t.is(result.value.name, "Florian");
t.is(result.value.id, "foo");
- result = makeStoreKeyValue({ name: "Florian" }, "foo", 10, true, null);
+ result = makeStoreKeyValue({
+ value: { name: "Florian" },
+ key: "foo",
+ currentKeyGenerator: 10,
+ autoIncrement: true,
+ keyPath: null,
+ });
t.is(result.updatedKeyGenerator, 10);
t.is(result.key, "foo");
t.is(result.value.name, "Florian");
t.is(result.value.id, undefined);
- result = makeStoreKeyValue({ name: "Florian" }, undefined, 10, true, null);
+ result = makeStoreKeyValue({
+ value: { name: "Florian" },
+ key: undefined,
+ currentKeyGenerator: 10,
+ autoIncrement: true,
+ keyPath: null,
+ });
t.is(result.updatedKeyGenerator, 11);
t.is(result.key, 10);
t.is(result.value.name, "Florian");
diff --git a/packages/idb-bridge/src/util/makeStoreKeyValue.ts b/packages/idb-bridge/src/util/makeStoreKeyValue.ts
index 4c7dab8d2..153cd9d81 100644
--- a/packages/idb-bridge/src/util/makeStoreKeyValue.ts
+++ b/packages/idb-bridge/src/util/makeStoreKeyValue.ts
@@ -75,19 +75,25 @@ function injectKey(
return newValue;
}
-export function makeStoreKeyValue(
- value: any,
- key: IDBValidKey | undefined,
- currentKeyGenerator: number,
- autoIncrement: boolean,
- keyPath: IDBKeyPath | IDBKeyPath[] | null,
-): StoreKeyResult {
+export interface MakeStoreKvRequest {
+ value: any;
+ key: IDBValidKey | undefined;
+ currentKeyGenerator: number;
+ autoIncrement: boolean;
+ keyPath: IDBKeyPath | IDBKeyPath[] | null;
+}
+
+export function makeStoreKeyValue(req: MakeStoreKvRequest): StoreKeyResult {
+ const { keyPath, currentKeyGenerator, autoIncrement } = req;
+ let { key, value } = req;
+
const haveKey = key !== null && key !== undefined;
const haveKeyPath = keyPath !== null && keyPath !== undefined;
// This models a decision table on (haveKey, haveKeyPath, autoIncrement)
try {
+ // FIXME: Perf: only do this if we need to inject something.
value = structuredClone(value);
} catch (e) {
throw new DataCloneError();
diff --git a/packages/idb-bridge/src/util/queueTask.ts b/packages/idb-bridge/src/util/queueTask.ts
index 297602c67..f8a6e799f 100644
--- a/packages/idb-bridge/src/util/queueTask.ts
+++ b/packages/idb-bridge/src/util/queueTask.ts
@@ -14,6 +14,11 @@
permissions and limitations under the License.
*/
+/**
+ * Queue a task to be executed *after* the microtask
+ * queue has been processed, but *before* subsequent setTimeout / setImmediate
+ * tasks.
+ */
export function queueTask(fn: () => void) {
let called = false;
const callFirst = () => {
diff --git a/packages/idb-bridge/src/util/structuredClone.test.ts b/packages/idb-bridge/src/util/structuredClone.test.ts
index 0c613e6cc..e13d4117f 100644
--- a/packages/idb-bridge/src/util/structuredClone.test.ts
+++ b/packages/idb-bridge/src/util/structuredClone.test.ts
@@ -15,7 +15,11 @@
*/
import test, { ExecutionContext } from "ava";
-import { structuredClone } from "./structuredClone.js";
+import {
+ structuredClone,
+ structuredEncapsulate,
+ structuredRevive,
+} from "./structuredClone.js";
function checkClone(t: ExecutionContext, x: any): void {
t.deepEqual(structuredClone(x), x);
@@ -59,3 +63,58 @@ test("structured clone (object cycles)", (t) => {
const obj1Clone = structuredClone(obj1);
t.is(obj1Clone, obj1Clone.c);
});
+
+test("encapsulate", (t) => {
+ t.deepEqual(structuredEncapsulate(42), 42);
+ t.deepEqual(structuredEncapsulate(true), true);
+ t.deepEqual(structuredEncapsulate(false), false);
+ t.deepEqual(structuredEncapsulate(null), null);
+
+ t.deepEqual(structuredEncapsulate(undefined), { $: "undef" });
+ t.deepEqual(structuredEncapsulate(42n), { $: "bigint", val: "42" });
+
+ t.deepEqual(structuredEncapsulate(new Date(42)), { $: "date", val: 42 });
+
+ t.deepEqual(structuredEncapsulate({ x: 42 }), { x: 42 });
+
+ t.deepEqual(structuredEncapsulate({ $: "bla", x: 42 }), {
+ $: "obj",
+ val: { $: "bla", x: 42 },
+ });
+
+ const x = { foo: 42, bar: {} } as any;
+ x.bar.baz = x;
+
+ t.deepEqual(structuredEncapsulate(x), {
+ foo: 42,
+ bar: {
+ baz: { $: "ref", d: 2, p: [] },
+ },
+ });
+});
+
+test("revive", (t) => {
+ t.deepEqual(structuredRevive(42), 42);
+ t.deepEqual(structuredRevive([1, 2, 3]), [1, 2, 3]);
+ t.deepEqual(structuredRevive(true), true);
+ t.deepEqual(structuredRevive(false), false);
+ t.deepEqual(structuredRevive(null), null);
+ t.deepEqual(structuredRevive({ $: "undef" }), undefined);
+ t.deepEqual(structuredRevive({ x: { $: "undef" } }), { x: undefined });
+
+ t.deepEqual(structuredRevive({ $: "date", val: 42}), new Date(42));
+
+ {
+ const x = { foo: 42, bar: {} } as any;
+ x.bar.baz = x;
+
+ const r = {
+ foo: 42,
+ bar: {
+ baz: { $: "ref", d: 2, p: [] },
+ },
+ };
+
+ t.deepEqual(structuredRevive(r), x);
+ }
+});
diff --git a/packages/idb-bridge/src/util/structuredClone.ts b/packages/idb-bridge/src/util/structuredClone.ts
index 2170118d5..2f857c6c5 100644
--- a/packages/idb-bridge/src/util/structuredClone.ts
+++ b/packages/idb-bridge/src/util/structuredClone.ts
@@ -16,22 +16,21 @@
/**
* Encoding (new, compositional version):
- *
+ *
* Encapsulate object that itself might contain a "$" field:
- * { $: { E... } }
+ * { $: "obj", val: ... }
+ * (Outer level only:) Wrap other values into object
+ * { $: "lit", val: ... }
* Circular reference:
- * { $: ["ref", uplevel, field...] }
+ * { $: "ref" l: uplevel, p: path }
* Date:
- * { $: ["data"], val: datestr }
+ * { $: "date", val: datestr }
* Bigint:
- * { $: ["bigint"], val: bigintstr }
+ * { $: "bigint", val: bigintstr }
* Array with special (non-number) attributes:
- * { $: ["array"], val: arrayobj }
+ * { $: "array", val: arrayobj }
* Undefined field
* { $: "undef" }
- *
- * Legacy (top-level only), for backwards compatibility:
- * { $types: [...] }
*/
/**
@@ -261,22 +260,18 @@ export function mkDeepCloneCheckOnly() {
function internalEncapsulate(
val: any,
- outRoot: any,
path: string[],
memo: Map<any, string[]>,
- types: Array<[string[], string]>,
): any {
const memoPath = memo.get(val);
if (memoPath) {
- types.push([path, "ref"]);
- return memoPath;
+ return { $: "ref", d: path.length, p: memoPath };
}
if (val === null) {
return null;
}
if (val === undefined) {
- types.push([path, "undef"]);
- return 0;
+ return { $: "undef" };
}
if (Array.isArray(val)) {
memo.set(val, path);
@@ -289,31 +284,33 @@ function internalEncapsulate(
break;
}
}
- if (special) {
- types.push([path, "array"]);
- }
for (const x in val) {
const p = [...path, x];
- outArr[x] = internalEncapsulate(val[x], outRoot, p, memo, types);
+ outArr[x] = internalEncapsulate(val[x], p, memo);
+ }
+ if (special) {
+ return { $: "array", val: outArr };
+ } else {
+ return outArr;
}
- return outArr;
}
if (val instanceof Date) {
- types.push([path, "date"]);
- return val.getTime();
+ return { $: "date", val: val.getTime() };
}
if (isUserObject(val) || isPlainObject(val)) {
memo.set(val, path);
const outObj: any = {};
for (const x in val) {
const p = [...path, x];
- outObj[x] = internalEncapsulate(val[x], outRoot, p, memo, types);
+ outObj[x] = internalEncapsulate(val[x], p, memo);
+ }
+ if ("$" in outObj) {
+ return { $: "obj", val: outObj };
}
return outObj;
}
if (typeof val === "bigint") {
- types.push([path, "bigint"]);
- return val.toString();
+ return { $: "bigint", val: val.toString() };
}
if (typeof val === "boolean") {
return val;
@@ -327,123 +324,103 @@ function internalEncapsulate(
throw Error();
}
-/**
- * Encapsulate a cloneable value into a plain JSON object.
- */
-export function structuredEncapsulate(val: any): any {
- const outRoot = {};
- const types: Array<[string[], string]> = [];
- let res;
- res = internalEncapsulate(val, outRoot, [], new Map(), types);
- if (res === null) {
- return res;
- }
- // We need to further encapsulate the outer layer
- if (
- Array.isArray(res) ||
- typeof res !== "object" ||
- "$" in res ||
- "$types" in res
- ) {
- res = { $: res };
- }
- if (types.length > 0) {
- res["$types"] = types;
- }
- return res;
+function derefPath(
+ root: any,
+ p1: Array<string | number>,
+ n: number,
+ p2: Array<string | number>,
+): any {
+ let v = root;
+ for (let i = 0; i < n; i++) {
+ v = v[p1[i]];
+ }
+ for (let i = 0; i < p2.length; i++) {
+ v = v[p2[i]];
+ }
+ return v;
}
-export function applyLegacyTypeAnnotations(val: any): any {
- if (val === null) {
- return null;
+function internalReviveArray(sval: any, root: any, path: string[]): any {
+ const newArr: any[] = [];
+ if (root === undefined) {
+ root = newArr;
}
- if (typeof val === "number") {
- return val;
+ for (let i = 0; i < sval.length; i++) {
+ const p = [...path, String(i)];
+ newArr.push(internalStructuredRevive(sval[i], root, p));
}
- if (typeof val === "string") {
- return val;
+ return newArr;
+}
+
+function internalReviveObject(sval: any, root: any, path: string[]): any {
+ const newObj = {} as any;
+ if (root === undefined) {
+ root = newObj;
}
- if (typeof val === "boolean") {
- return val;
+ for (const key of Object.keys(sval)) {
+ const p = [...path, key];
+ newObj[key] = internalStructuredRevive(sval[key], root, p);
}
- if (!isPlainObject(val)) {
- throw Error();
- }
- let types = val.$types ?? [];
- delete val.$types;
- let outRoot: any;
- if ("$" in val) {
- outRoot = val.$;
- } else {
- outRoot = val;
- }
- function mutatePath(path: string[], f: (x: any) => any): void {
- if (path.length == 0) {
- outRoot = f(outRoot);
- return;
- }
- let obj = outRoot;
- for (let i = 0; i < path.length - 1; i++) {
- const n = path[i];
- if (!(n in obj)) {
- obj[n] = {};
- }
- obj = obj[n];
- }
- const last = path[path.length - 1];
- obj[last] = f(obj[last]);
+ return newObj;
+}
+
+function internalStructuredRevive(sval: any, root: any, path: string[]): any {
+ if (typeof sval === "string") {
+ return sval;
}
- function lookupPath(path: string[]): any {
- let obj = outRoot;
- for (const n of path) {
- obj = obj[n];
- }
- return obj;
+ if (typeof sval === "number") {
+ return sval;
}
- for (const [path, type] of types) {
- switch (type) {
- case "bigint": {
- mutatePath(path, (x) => BigInt(x));
- break;
- }
- case "array": {
- mutatePath(path, (x) => {
- const newArr: any = [];
- for (const k in x) {
- newArr[k] = x[k];
- }
- return newArr;
- });
- break;
- }
- case "date": {
- mutatePath(path, (x) => new Date(x));
- break;
- }
- case "undef": {
- mutatePath(path, (x) => undefined);
- break;
- }
- case "ref": {
- mutatePath(path, (x) => lookupPath(x));
- break;
+ if (typeof sval === "boolean") {
+ return sval;
+ }
+ if (sval === null) {
+ return null;
+ }
+ if (Array.isArray(sval)) {
+ return internalReviveArray(sval, root, path);
+ }
+
+ if (isUserObject(sval) || isPlainObject(sval)) {
+ if ("$" in sval) {
+ const dollar = sval.$;
+ switch (dollar) {
+ case "undef":
+ return undefined;
+ case "bigint":
+ return BigInt((sval as any).val);
+ case "date":
+ return new Date((sval as any).val);
+ case "obj": {
+ return internalReviveObject((sval as any).val, root, path);
+ }
+ case "array":
+ return internalReviveArray((sval as any).val, root, path);
+ case "ref": {
+ const level = (sval as any).l;
+ const p2 = (sval as any).p;
+ return derefPath(root, path, path.length - level, p2);
+ }
+ default:
+ throw Error();
}
- default:
- throw Error(`type '${type}' not implemented`);
+ } else {
+ return internalReviveObject(sval, root, path);
}
}
- return outRoot;
+
+ throw Error();
}
-export function internalStructuredRevive(val: any): any {
- // FIXME: Do the newly specified, compositional encoding here.
- val = JSON.parse(JSON.stringify(val));
- return val;
+/**
+ * Encapsulate a cloneable value into a plain JSON value.
+ */
+export function structuredEncapsulate(val: any): any {
+ return internalEncapsulate(val, [], new Map());
}
-export function structuredRevive(val: any): any {
- const r = internalStructuredRevive(val);
- return applyLegacyTypeAnnotations(r);
+export function structuredRevive(sval: any): any {
+ return internalStructuredRevive(sval, undefined, []);
}
/**
diff --git a/packages/idb-bridge/src/util/valueToKey.ts b/packages/idb-bridge/src/util/valueToKey.ts
index 6df82af81..0cd824689 100644
--- a/packages/idb-bridge/src/util/valueToKey.ts
+++ b/packages/idb-bridge/src/util/valueToKey.ts
@@ -17,7 +17,11 @@
import { IDBValidKey } from "../idbtypes.js";
import { DataError } from "./errors.js";
-// https://www.w3.org/TR/IndexedDB-2/#convert-a-value-to-a-key
+/**
+ * Algorithm to "convert a value to a key".
+ *
+ * https://www.w3.org/TR/IndexedDB/#convert-value-to-key
+ */
export function valueToKey(
input: any,
seen?: Set<object>,
diff --git a/packages/idb-bridge/tsconfig.json b/packages/idb-bridge/tsconfig.json
index b0a6808f4..19e9c2a74 100644
--- a/packages/idb-bridge/tsconfig.json
+++ b/packages/idb-bridge/tsconfig.json
@@ -4,7 +4,7 @@
"lib": ["es6"],
"module": "ES2020",
"moduleResolution": "Node16",
- "target": "ES6",
+ "target": "ES2020",
"allowJs": true,
"noImplicitAny": true,
"outDir": "lib",
diff --git a/packages/taler-util/package.json b/packages/taler-util/package.json
index 73eed814c..504b8259f 100644
--- a/packages/taler-util/package.json
+++ b/packages/taler-util/package.json
@@ -11,6 +11,7 @@
".": {
"node": "./lib/index.node.js",
"browser": "./lib/index.browser.js",
+ "qtart": "./lib/index.qtart.js",
"default": "./lib/index.js"
},
"./twrpc": {
diff --git a/packages/taler-util/src/index.qtart.ts b/packages/taler-util/src/index.qtart.ts
new file mode 100644
index 000000000..ddb9bcfd4
--- /dev/null
+++ b/packages/taler-util/src/index.qtart.ts
@@ -0,0 +1,27 @@
+/*
+ This file is part of GNU Taler
+ (C) 2021 Taler Systems S.A.
+
+ GNU Taler is free software; you can redistribute it and/or modify it under the
+ terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3, or (at your option) any later version.
+
+ GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+ A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with
+ GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
+ */
+
import { setPRNG } from "./nacl-fast.js";

// Entry point for the qtart (QuickJS-based) runtime: wire nacl's PRNG
// up to the host-provided RNG before re-exporting the library.
setPRNG(function (x: Uint8Array, n: number) {
  // @ts-ignore
  // `globalThis._tart` is presumably injected by the qtart host runtime
  // and returns an array-like of n random bytes — TODO confirm.
  const va = globalThis._tart.randomBytes(n);
  const v = new Uint8Array(va);
  for (let i = 0; i < n; i++) x[i] = v[i];
  // Wipe the temporary copy so random material does not linger.
  for (let i = 0; i < v.length; i++) v[i] = 0;
});

export * from "./index.js";
diff --git a/packages/taler-util/src/transactions-types.ts b/packages/taler-util/src/transactions-types.ts
index 2d278e3e8..6331bc731 100644
--- a/packages/taler-util/src/transactions-types.ts
+++ b/packages/taler-util/src/transactions-types.ts
@@ -48,6 +48,7 @@ import {
RefreshReason,
TalerErrorDetail,
TransactionIdStr,
+ TransactionStateFilter,
} from "./wallet-types.js";
export interface TransactionsRequest {
@@ -65,6 +66,8 @@ export interface TransactionsRequest {
* If true, include all refreshes in the transactions list.
*/
includeRefreshes?: boolean;
+
+ filterByState?: TransactionStateFilter
}
export interface TransactionState {
diff --git a/packages/taler-util/src/wallet-types.ts b/packages/taler-util/src/wallet-types.ts
index 3179cd6f3..04fb43ec6 100644
--- a/packages/taler-util/src/wallet-types.ts
+++ b/packages/taler-util/src/wallet-types.ts
@@ -2644,3 +2644,10 @@ export const codecForValidateIbanResponse = (): Codec<ValidateIbanResponse> =>
buildCodecForObject<ValidateIbanResponse>()
.property("valid", codecForBoolean())
.build("ValidateIbanResponse");
+
+export type TransactionStateFilter = "nonfinal";
+
+export interface TransactionRecordFilter {
+ onlyState?: TransactionStateFilter;
+ onlyCurrency?: string;
+}
diff --git a/packages/taler-wallet-cli/Makefile b/packages/taler-wallet-cli/Makefile
index df2de4d7c..388401eae 100644
--- a/packages/taler-wallet-cli/Makefile
+++ b/packages/taler-wallet-cli/Makefile
@@ -24,7 +24,9 @@ install_target = $(prefix)/lib/taler-wallet-cli
.PHONY: install install-nodeps deps
install-nodeps:
./build-node.mjs
+ @echo installing wallet CLI to $(install_target)
install -d $(prefix)/bin
+ install -d $(install_target)/build
install -d $(install_target)/bin
install -d $(install_target)/node_modules/taler-wallet-cli
install -d $(install_target)/node_modules/taler-wallet-cli/bin
@@ -32,6 +34,8 @@ install-nodeps:
install ./dist/taler-wallet-cli-bundled.cjs $(install_target)/node_modules/taler-wallet-cli/dist/
install ./dist/taler-wallet-cli-bundled.cjs.map $(install_target)/node_modules/taler-wallet-cli/dist/
install ./bin/taler-wallet-cli.mjs $(install_target)/node_modules/taler-wallet-cli/bin/
+ install ../idb-bridge/node_modules/better-sqlite3/build/Release/better_sqlite3.node $(install_target)/build/ \
+ || echo "sqlite3 unavailable, better-sqlite3 native module not found"
ln -sf $(install_target)/node_modules/taler-wallet-cli/bin/taler-wallet-cli.mjs $(prefix)/bin/taler-wallet-cli
deps:
pnpm install --frozen-lockfile --filter @gnu-taler/taler-wallet-cli...
diff --git a/packages/taler-wallet-cli/README.md b/packages/taler-wallet-cli/README.md
index c75ef8130..27b2373c9 100644
--- a/packages/taler-wallet-cli/README.md
+++ b/packages/taler-wallet-cli/README.md
@@ -2,3 +2,8 @@
This package provides `taler-wallet-cli`, the command-line interface for the
GNU Taler wallet.
+
+## sqlite3 backend
+
+To be able to use the sqlite3 backend, make sure that better-sqlite3
+is installed as an optional dependency in the ../idb-bridge package.
diff --git a/packages/taler-wallet-cli/bin/taler-wallet-cli-local.mjs b/packages/taler-wallet-cli/bin/taler-wallet-cli-local.mjs
new file mode 100755
index 000000000..3620330b0
--- /dev/null
+++ b/packages/taler-wallet-cli/bin/taler-wallet-cli-local.mjs
@@ -0,0 +1,8 @@
+#!/usr/bin/env node
+
+// Execute the wallet CLI from the source directory.
+// This script is meant for testing and must not
+// be installed.
+
+import { main } from '../lib/index.js';
+main();
diff --git a/packages/taler-wallet-cli/build-qtart.mjs b/packages/taler-wallet-cli/build-qtart.mjs
index 0cecf3ae9..7042bf49e 100755
--- a/packages/taler-wallet-cli/build-qtart.mjs
+++ b/packages/taler-wallet-cli/build-qtart.mjs
@@ -59,7 +59,7 @@ export const buildConfig = {
conditions: ["qtart"],
sourcemap: true,
// quickjs standard library
- external: ["std", "os"],
+ external: ["std", "os", "better-sqlite3"],
define: {
__VERSION__: `"${_package.version}"`,
__GIT_HASH__: `"${GIT_HASH}"`,
diff --git a/packages/taler-wallet-core/src/db.ts b/packages/taler-wallet-core/src/db.ts
index c7d0b0bda..1d0d3a6e5 100644
--- a/packages/taler-wallet-core/src/db.ts
+++ b/packages/taler-wallet-core/src/db.ts
@@ -119,7 +119,7 @@ export const CURRENT_DB_CONFIG_KEY = "currentMainDbName";
* backwards-compatible way or object stores and indices
* are added.
*/
-export const WALLET_DB_MINOR_VERSION = 9;
+export const WALLET_DB_MINOR_VERSION = 10;
/**
* Ranges for operation status fields.
@@ -2675,6 +2675,9 @@ export const WalletStoresV1 = {
}),
{
byProposalId: describeIndex("byProposalId", "proposalId"),
+ byStatus: describeIndex("byStatus", "status", {
+ versionAdded: 10,
+ }),
},
),
refundItems: describeStore(
diff --git a/packages/taler-wallet-core/src/host-common.ts b/packages/taler-wallet-core/src/host-common.ts
index 21e7f1157..c56d7ed1c 100644
--- a/packages/taler-wallet-core/src/host-common.ts
+++ b/packages/taler-wallet-core/src/host-common.ts
@@ -16,7 +16,7 @@
import { WalletNotification } from "@gnu-taler/taler-util";
import { HttpRequestLibrary } from "@gnu-taler/taler-util/http";
-import { WalletConfig, WalletConfigParameter } from "./index.js";
+import { WalletConfigParameter } from "./index.js";
/**
* Helpers to initiate a wallet in a host environment.
diff --git a/packages/taler-wallet-core/src/host-impl.node.ts b/packages/taler-wallet-core/src/host-impl.node.ts
index 150bba49a..ceda7243f 100644
--- a/packages/taler-wallet-core/src/host-impl.node.ts
+++ b/packages/taler-wallet-core/src/host-impl.node.ts
@@ -27,6 +27,7 @@ import type { IDBFactory } from "@gnu-taler/idb-bridge";
import {
BridgeIDBFactory,
MemoryBackend,
+ createSqliteBackend,
shimIndexedDB,
} from "@gnu-taler/idb-bridge";
import { AccessStats } from "@gnu-taler/idb-bridge";
@@ -39,24 +40,21 @@ import { createPlatformHttpLib } from "@gnu-taler/taler-util/http";
import { SetTimeoutTimerAPI } from "./util/timer.js";
import { Wallet } from "./wallet.js";
import { DefaultNodeWalletArgs, makeTempfileId } from "./host-common.js";
+import { createNodeSqlite3Impl } from "@gnu-taler/idb-bridge/node-sqlite3-bindings";
const logger = new Logger("host-impl.node.ts");
-/**
- * Get a wallet instance with default settings for node.
- *
- * Extended version that allows getting DB stats.
- */
-export async function createNativeWalletHost2(
+interface MakeDbResult {
+ idbFactory: BridgeIDBFactory;
+ getStats: () => AccessStats;
+}
+
+async function makeFileDb(
args: DefaultNodeWalletArgs = {},
-): Promise<{
- wallet: Wallet;
- getDbStats: () => AccessStats;
-}> {
+): Promise<MakeDbResult> {
BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend();
myBackend.enableTracing = false;
-
const storagePath = args.persistentStoragePath;
if (storagePath) {
try {
@@ -96,8 +94,41 @@ export async function createNativeWalletHost2(
BridgeIDBFactory.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
- const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory;
+ return {
+ idbFactory: myBridgeIdbFactory,
+ getStats: () => myBackend.accessStats,
+ };
+}
+async function makeSqliteDb(
+ args: DefaultNodeWalletArgs,
+): Promise<MakeDbResult> {
+ BridgeIDBFactory.enableTracing = false;
+ const imp = await createNodeSqlite3Impl();
+ const myBackend = await createSqliteBackend(imp, {
+ filename: args.persistentStoragePath ?? ":memory:",
+ });
+ myBackend.enableTracing = false;
+ const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
+ return {
+ getStats() {
+ throw Error("not implemented");
+ },
+ idbFactory: myBridgeIdbFactory,
+ };
+}
+
+/**
+ * Get a wallet instance with default settings for node.
+ *
+ * Extended version that allows getting DB stats.
+ */
+export async function createNativeWalletHost2(
+ args: DefaultNodeWalletArgs = {},
+): Promise<{
+ wallet: Wallet;
+ getDbStats: () => AccessStats;
+}> {
let myHttpLib;
if (args.httpLib) {
myHttpLib = args.httpLib;
@@ -115,7 +146,17 @@ export async function createNativeWalletHost2(
);
};
- shimIndexedDB(myBridgeIdbFactory);
+ let dbResp: MakeDbResult;
+
+ if (!args.persistentStoragePath || args.persistentStoragePath.endsWith(".json")) {
+ dbResp = await makeFileDb(args);
+ } else {
+ dbResp = await makeSqliteDb(args);
+ }
+
+ const myIdbFactory: IDBFactory = dbResp.idbFactory as any as IDBFactory;
+
+ shimIndexedDB(dbResp.idbFactory);
const myDb = await openTalerDatabase(myIdbFactory, myVersionChange);
@@ -158,6 +199,6 @@ export async function createNativeWalletHost2(
}
return {
wallet: w,
- getDbStats: () => myBackend.accessStats,
+ getDbStats: dbResp.getStats,
};
}
diff --git a/packages/taler-wallet-core/src/host-impl.qtart.ts b/packages/taler-wallet-core/src/host-impl.qtart.ts
index d10914b10..390282f8c 100644
--- a/packages/taler-wallet-core/src/host-impl.qtart.ts
+++ b/packages/taler-wallet-core/src/host-impl.qtart.ts
@@ -22,11 +22,17 @@
/**
* Imports.
*/
-import type { IDBFactory } from "@gnu-taler/idb-bridge";
+import type {
+ IDBFactory,
+ ResultRow,
+ Sqlite3Interface,
+ Sqlite3Statement,
+} from "@gnu-taler/idb-bridge";
// eslint-disable-next-line no-duplicate-imports
import {
BridgeIDBFactory,
MemoryBackend,
+ createSqliteBackend,
shimIndexedDB,
} from "@gnu-taler/idb-bridge";
import { AccessStats } from "@gnu-taler/idb-bridge";
@@ -41,12 +47,78 @@ import { DefaultNodeWalletArgs, makeTempfileId } from "./host-common.js";
const logger = new Logger("host-impl.qtart.ts");
-export async function createNativeWalletHost2(
+interface MakeDbResult {
+ idbFactory: BridgeIDBFactory;
+ getStats: () => AccessStats;
+}
+
+let numStmt = 0;
+
+export async function createQtartSqlite3Impl(): Promise<Sqlite3Interface> {
+ const tart: any = (globalThis as any)._tart;
+ if (!tart) {
+    throw Error("globalThis._tart not defined");
+ }
+ return {
+ open(filename: string) {
+ const internalDbHandle = tart.sqlite3Open(filename);
+ return {
+ internalDbHandle,
+ close() {
+ tart.sqlite3Close(internalDbHandle);
+ },
+ prepare(stmtStr): Sqlite3Statement {
+ const stmtHandle = tart.sqlite3Prepare(internalDbHandle, stmtStr);
+ return {
+ internalStatement: stmtHandle,
+ getAll(params): ResultRow[] {
+ numStmt++;
+ return tart.sqlite3StmtGetAll(stmtHandle, params);
+ },
+ getFirst(params): ResultRow | undefined {
+ numStmt++;
+ return tart.sqlite3StmtGetFirst(stmtHandle, params);
+ },
+ run(params) {
+ numStmt++;
+ return tart.sqlite3StmtRun(stmtHandle, params);
+ },
+ };
+ },
+ exec(sqlStr): void {
+ numStmt++;
+ tart.sqlite3Exec(internalDbHandle, sqlStr);
+ },
+ };
+ },
+ };
+}
+
+async function makeSqliteDb(
+ args: DefaultNodeWalletArgs,
+): Promise<MakeDbResult> {
+ BridgeIDBFactory.enableTracing = false;
+ const imp = await createQtartSqlite3Impl();
+ const myBackend = await createSqliteBackend(imp, {
+ filename: args.persistentStoragePath ?? ":memory:",
+ });
+ myBackend.trackStats = true;
+ myBackend.enableTracing = false;
+ const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
+ return {
+ getStats() {
+ return {
+ ...myBackend.accessStats,
+ primitiveStatements: numStmt,
+ }
+ },
+ idbFactory: myBridgeIdbFactory,
+ };
+}
+
+async function makeFileDb(
args: DefaultNodeWalletArgs = {},
-): Promise<{
- wallet: Wallet;
- getDbStats: () => AccessStats;
-}> {
+): Promise<MakeDbResult> {
BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend();
myBackend.enableTracing = false;
@@ -78,12 +150,34 @@ export async function createNativeWalletHost2(
};
}
- logger.info("done processing storage path");
+ const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
+ return {
+ idbFactory: myBridgeIdbFactory,
+ getStats: () => myBackend.accessStats,
+ };
+}
+export async function createNativeWalletHost2(
+ args: DefaultNodeWalletArgs = {},
+): Promise<{
+ wallet: Wallet;
+ getDbStats: () => AccessStats;
+}> {
BridgeIDBFactory.enableTracing = false;
- const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
- const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory;
+ let dbResp: MakeDbResult;
+
+ if (args.persistentStoragePath && args.persistentStoragePath.endsWith(".json")) {
+ logger.info("using JSON file backend (slow!)");
+ dbResp = await makeFileDb(args);
+ } else {
+    logger.info("using sqlite3 backend (experimental!)");
+ dbResp = await makeSqliteDb(args)
+ }
+
+ const myIdbFactory: IDBFactory = dbResp.idbFactory as any as IDBFactory;
+
+ shimIndexedDB(dbResp.idbFactory);
let myHttpLib;
if (args.httpLib) {
@@ -102,8 +196,6 @@ export async function createNativeWalletHost2(
);
};
- shimIndexedDB(myBridgeIdbFactory);
-
const myDb = await openTalerDatabase(myIdbFactory, myVersionChange);
let workerFactory;
@@ -124,6 +216,6 @@ export async function createNativeWalletHost2(
}
return {
wallet: w,
- getDbStats: () => myBackend.accessStats,
+ getDbStats: dbResp.getStats,
};
}
diff --git a/packages/taler-wallet-core/src/host.ts b/packages/taler-wallet-core/src/host.ts
index 4b319f081..feccf42a6 100644
--- a/packages/taler-wallet-core/src/host.ts
+++ b/packages/taler-wallet-core/src/host.ts
@@ -16,7 +16,6 @@
import { DefaultNodeWalletArgs } from "./host-common.js";
import { Wallet } from "./index.js";
-
import * as hostImpl from "#host-impl";
import { AccessStats } from "@gnu-taler/idb-bridge";
diff --git a/packages/taler-wallet-core/src/operations/pending.ts b/packages/taler-wallet-core/src/operations/pending.ts
index cc9217d67..6c6546f83 100644
--- a/packages/taler-wallet-core/src/operations/pending.ts
+++ b/packages/taler-wallet-core/src/operations/pending.ts
@@ -34,13 +34,24 @@ import {
WithdrawalGroupStatus,
RewardRecordStatus,
DepositOperationStatus,
+ RefreshGroupRecord,
+ WithdrawalGroupRecord,
+ DepositGroupRecord,
+ RewardRecord,
+ PurchaseRecord,
+ PeerPullPaymentInitiationRecord,
+ PeerPullPaymentIncomingRecord,
+ PeerPushPaymentInitiationRecord,
+ PeerPushPaymentIncomingRecord,
+ RefundGroupRecord,
+ RefundGroupStatus,
} from "../db.js";
import {
PendingOperationsResponse,
PendingTaskType,
TaskId,
} from "../pending-types.js";
-import { AbsoluteTime } from "@gnu-taler/taler-util";
+import { AbsoluteTime, TransactionRecordFilter } from "@gnu-taler/taler-util";
import { InternalWalletState } from "../internal-wallet-state.js";
import { GetReadOnlyAccess } from "../util/query.js";
import { GlobalIDB } from "@gnu-taler/idb-bridge";
@@ -105,6 +116,32 @@ async function gatherExchangePending(
});
}
+/**
+ * Iterate refresh records based on a filter.
+ */
+export async function iterRecordsForRefresh(
+ tx: GetReadOnlyAccess<{
+ refreshGroups: typeof WalletStoresV1.refreshGroups;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: RefreshGroupRecord) => Promise<void>,
+): Promise<void> {
+ let refreshGroups: RefreshGroupRecord[];
+ if (filter.onlyState === "nonfinal") {
+ const keyRange = GlobalIDB.KeyRange.bound(
+ OperationStatusRange.ACTIVE_START,
+ OperationStatusRange.ACTIVE_END,
+ );
+ refreshGroups = await tx.refreshGroups.indexes.byStatus.getAll(keyRange);
+ } else {
+ refreshGroups = await tx.refreshGroups.indexes.byStatus.getAll();
+ }
+
+ for (const r of refreshGroups) {
+ await f(r);
+ }
+}
+
async function gatherRefreshPending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
@@ -114,22 +151,13 @@ async function gatherRefreshPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const keyRange = GlobalIDB.KeyRange.bound(
- OperationStatusRange.ACTIVE_START,
- OperationStatusRange.ACTIVE_END,
- );
- const refreshGroups = await tx.refreshGroups.indexes.byStatus.getAll(
- keyRange,
- );
- for (const r of refreshGroups) {
+ await iterRecordsForRefresh(tx, { onlyState: "nonfinal" }, async (r) => {
if (r.timestampFinished) {
return;
}
const opId = TaskIdentifiers.forRefresh(r);
const retryRecord = await tx.operationRetries.get(opId);
-
const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
-
resp.pendingOperations.push({
type: PendingTaskType.Refresh,
...getPendingCommon(ws, opId, timestampDue),
@@ -140,6 +168,30 @@ async function gatherRefreshPending(
),
retryInfo: retryRecord?.retryInfo,
});
+ });
+}
+
+export async function iterRecordsForWithdrawal(
+ tx: GetReadOnlyAccess<{
+ withdrawalGroups: typeof WalletStoresV1.withdrawalGroups;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: WithdrawalGroupRecord) => Promise<void>,
+): Promise<void> {
+ let withdrawalGroupRecords: WithdrawalGroupRecord[];
+ if (filter.onlyState === "nonfinal") {
+ const range = GlobalIDB.KeyRange.bound(
+ WithdrawalGroupStatus.PendingRegisteringBank,
+ WithdrawalGroupStatus.PendingAml,
+ );
+ withdrawalGroupRecords =
+ await tx.withdrawalGroups.indexes.byStatus.getAll(range);
+ } else {
+ withdrawalGroupRecords =
+ await tx.withdrawalGroups.indexes.byStatus.getAll();
+ }
+ for (const wgr of withdrawalGroupRecords) {
+ await f(wgr);
}
}
@@ -153,12 +205,7 @@ async function gatherWithdrawalPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const range = GlobalIDB.KeyRange.bound(
- WithdrawalGroupStatus.PendingRegisteringBank,
- WithdrawalGroupStatus.PendingAml,
- );
- const wsrs = await tx.withdrawalGroups.indexes.byStatus.getAll(range);
- for (const wsr of wsrs) {
+ await iterRecordsForWithdrawal(tx, { onlyState: "nonfinal" }, async (wsr) => {
const opTag = TaskIdentifiers.forWithdrawal(wsr);
let opr = await tx.operationRetries.get(opTag);
const now = AbsoluteTime.now();
@@ -184,6 +231,30 @@ async function gatherWithdrawalPending(
lastError: opr.lastError,
retryInfo: opr.retryInfo,
});
+ });
+}
+
+export async function iterRecordsForDeposit(
+ tx: GetReadOnlyAccess<{
+ depositGroups: typeof WalletStoresV1.depositGroups;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: DepositGroupRecord) => Promise<void>,
+): Promise<void> {
+ let dgs: DepositGroupRecord[];
+ if (filter.onlyState === "nonfinal") {
+ dgs = await tx.depositGroups.indexes.byStatus.getAll(
+ GlobalIDB.KeyRange.bound(
+ DepositOperationStatus.PendingDeposit,
+ DepositOperationStatus.PendingKyc,
+ ),
+ );
+ } else {
+ dgs = await tx.depositGroups.indexes.byStatus.getAll();
+ }
+
+ for (const dg of dgs) {
+ await f(dg);
}
}
@@ -196,16 +267,7 @@ async function gatherDepositPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const dgs = await tx.depositGroups.indexes.byStatus.getAll(
- GlobalIDB.KeyRange.bound(
- DepositOperationStatus.PendingDeposit,
- DepositOperationStatus.PendingKyc,
- ),
- );
- for (const dg of dgs) {
- if (dg.timestampFinished) {
- return;
- }
+ await iterRecordsForDeposit(tx, { onlyState: "nonfinal" }, async (dg) => {
let deposited = true;
for (const d of dg.depositedPerCoin) {
if (!d) {
@@ -226,10 +288,28 @@ async function gatherDepositPending(
lastError: retryRecord?.lastError,
retryInfo: retryRecord?.retryInfo,
});
+ });
+}
+
+export async function iterRecordsForReward(
+ tx: GetReadOnlyAccess<{
+ rewards: typeof WalletStoresV1.rewards;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: RewardRecord) => Promise<void>,
+): Promise<void> {
+ if (filter.onlyState === "nonfinal") {
+ const range = GlobalIDB.KeyRange.bound(
+ RewardRecordStatus.PendingPickup,
+ RewardRecordStatus.PendingPickup,
+ );
+ await tx.rewards.indexes.byStatus.iter(range).forEachAsync(f);
+ } else {
+ await tx.rewards.indexes.byStatus.iter().forEachAsync(f);
}
}
-async function gatherTipPending(
+async function gatherRewardPending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
rewards: typeof WalletStoresV1.rewards;
@@ -238,15 +318,7 @@ async function gatherTipPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const range = GlobalIDB.KeyRange.bound(
- RewardRecordStatus.PendingPickup,
- RewardRecordStatus.PendingPickup,
- );
- await tx.rewards.indexes.byStatus.iter(range).forEachAsync(async (tip) => {
- // FIXME: The tip record needs a proper status field!
- if (tip.pickedUpTimestamp) {
- return;
- }
+ await iterRecordsForReward(tx, { onlyState: "nonfinal" }, async (tip) => {
const opId = TaskIdentifiers.forTipPickup(tip);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
@@ -264,6 +336,43 @@ async function gatherTipPending(
});
}
+export async function iterRecordsForRefund(
+ tx: GetReadOnlyAccess<{
+ refundGroups: typeof WalletStoresV1.refundGroups;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: RefundGroupRecord) => Promise<void>,
+): Promise<void> {
+ if (filter.onlyState === "nonfinal") {
+ const keyRange = GlobalIDB.KeyRange.only(
+ RefundGroupStatus.Pending
+ );
+ await tx.refundGroups.indexes.byStatus
+ .iter(keyRange)
+ .forEachAsync(f);
+ } else {
+ await tx.refundGroups.iter().forEachAsync(f);
+ }
+}
+
+export async function iterRecordsForPurchase(
+ tx: GetReadOnlyAccess<{
+ purchases: typeof WalletStoresV1.purchases;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: PurchaseRecord) => Promise<void>,
+): Promise<void> {
+ if (filter.onlyState === "nonfinal") {
+ const keyRange = GlobalIDB.KeyRange.bound(
+ PurchaseStatus.PendingDownloadingProposal,
+ PurchaseStatus.PendingAcceptRefund,
+ );
+ await tx.purchases.indexes.byStatus.iter(keyRange).forEachAsync(f);
+ } else {
+ await tx.purchases.indexes.byStatus.iter().forEachAsync(f);
+ }
+}
+
async function gatherPurchasePending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
@@ -273,27 +382,20 @@ async function gatherPurchasePending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const keyRange = GlobalIDB.KeyRange.bound(
- PurchaseStatus.PendingDownloadingProposal,
- PurchaseStatus.PendingAcceptRefund,
- );
- await tx.purchases.indexes.byStatus
- .iter(keyRange)
- .forEachAsync(async (pr) => {
- const opId = TaskIdentifiers.forPay(pr);
- const retryRecord = await tx.operationRetries.get(opId);
- const timestampDue =
- retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
- resp.pendingOperations.push({
- type: PendingTaskType.Purchase,
- ...getPendingCommon(ws, opId, timestampDue),
- givesLifeness: true,
- statusStr: PurchaseStatus[pr.purchaseStatus],
- proposalId: pr.proposalId,
- retryInfo: retryRecord?.retryInfo,
- lastError: retryRecord?.lastError,
- });
+ await iterRecordsForPurchase(tx, { onlyState: "nonfinal" }, async (pr) => {
+ const opId = TaskIdentifiers.forPay(pr);
+ const retryRecord = await tx.operationRetries.get(opId);
+ const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
+ resp.pendingOperations.push({
+ type: PendingTaskType.Purchase,
+ ...getPendingCommon(ws, opId, timestampDue),
+ givesLifeness: true,
+ statusStr: PurchaseStatus[pr.purchaseStatus],
+ proposalId: pr.proposalId,
+ retryInfo: retryRecord?.retryInfo,
+ lastError: retryRecord?.lastError,
});
+ });
}
async function gatherRecoupPending(
@@ -362,6 +464,26 @@ async function gatherBackupPending(
});
}
+export async function iterRecordsForPeerPullInitiation(
+ tx: GetReadOnlyAccess<{
+ peerPullPaymentInitiations: typeof WalletStoresV1.peerPullPaymentInitiations;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: PeerPullPaymentInitiationRecord) => Promise<void>,
+): Promise<void> {
+ if (filter.onlyState === "nonfinal") {
+ const keyRange = GlobalIDB.KeyRange.bound(
+ PeerPullPaymentInitiationStatus.PendingCreatePurse,
+ PeerPullPaymentInitiationStatus.AbortingDeletePurse,
+ );
+ await tx.peerPullPaymentInitiations.indexes.byStatus
+ .iter(keyRange)
+ .forEachAsync(f);
+ } else {
+ await tx.peerPullPaymentInitiations.indexes.byStatus.iter().forEachAsync(f);
+ }
+}
+
async function gatherPeerPullInitiationPending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
@@ -371,13 +493,10 @@ async function gatherPeerPullInitiationPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const keyRange = GlobalIDB.KeyRange.bound(
- PeerPullPaymentInitiationStatus.PendingCreatePurse,
- PeerPullPaymentInitiationStatus.AbortingDeletePurse,
- );
- await tx.peerPullPaymentInitiations.indexes.byStatus
- .iter(keyRange)
- .forEachAsync(async (pi) => {
+ await iterRecordsForPeerPullInitiation(
+ tx,
+ { onlyState: "nonfinal" },
+ async (pi) => {
const opId = TaskIdentifiers.forPeerPullPaymentInitiation(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@@ -389,7 +508,28 @@ async function gatherPeerPullInitiationPending(
retryInfo: retryRecord?.retryInfo,
pursePub: pi.pursePub,
});
- });
+ },
+ );
+}
+
+export async function iterRecordsForPeerPullDebit(
+ tx: GetReadOnlyAccess<{
+ peerPullPaymentIncoming: typeof WalletStoresV1.peerPullPaymentIncoming;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: PeerPullPaymentIncomingRecord) => Promise<void>,
+): Promise<void> {
+ if (filter.onlyState === "nonfinal") {
+ const keyRange = GlobalIDB.KeyRange.bound(
+ PeerPullDebitRecordStatus.PendingDeposit,
+ PeerPullDebitRecordStatus.AbortingRefresh,
+ );
+ await tx.peerPullPaymentIncoming.indexes.byStatus
+ .iter(keyRange)
+ .forEachAsync(f);
+ } else {
+ await tx.peerPullPaymentIncoming.indexes.byStatus.iter().forEachAsync(f);
+ }
}
async function gatherPeerPullDebitPending(
@@ -401,13 +541,10 @@ async function gatherPeerPullDebitPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const keyRange = GlobalIDB.KeyRange.bound(
- PeerPullDebitRecordStatus.PendingDeposit,
- PeerPullDebitRecordStatus.AbortingRefresh,
- );
- await tx.peerPullPaymentIncoming.indexes.byStatus
- .iter(keyRange)
- .forEachAsync(async (pi) => {
+ await iterRecordsForPeerPullDebit(
+ tx,
+ { onlyState: "nonfinal" },
+ async (pi) => {
const opId = TaskIdentifiers.forPeerPullPaymentDebit(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@@ -419,7 +556,28 @@ async function gatherPeerPullDebitPending(
retryInfo: retryRecord?.retryInfo,
peerPullPaymentIncomingId: pi.peerPullPaymentIncomingId,
});
- });
+ },
+ );
+}
+
+export async function iterRecordsForPeerPushInitiation(
+ tx: GetReadOnlyAccess<{
+ peerPushPaymentInitiations: typeof WalletStoresV1.peerPushPaymentInitiations;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: PeerPushPaymentInitiationRecord) => Promise<void>,
+): Promise<void> {
+ if (filter.onlyState === "nonfinal") {
+ const keyRange = GlobalIDB.KeyRange.bound(
+ PeerPushPaymentInitiationStatus.PendingCreatePurse,
+ PeerPushPaymentInitiationStatus.AbortingRefresh,
+ );
+ await tx.peerPushPaymentInitiations.indexes.byStatus
+ .iter(keyRange)
+ .forEachAsync(f);
+ } else {
+ await tx.peerPushPaymentInitiations.indexes.byStatus.iter().forEachAsync(f);
+ }
}
async function gatherPeerPushInitiationPending(
@@ -431,13 +589,10 @@ async function gatherPeerPushInitiationPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
- const keyRange = GlobalIDB.KeyRange.bound(
- PeerPushPaymentInitiationStatus.PendingCreatePurse,
- PeerPushPaymentInitiationStatus.AbortingRefresh,
- );
- await tx.peerPushPaymentInitiations.indexes.byStatus
- .iter(keyRange)
- .forEachAsync(async (pi) => {
+ await iterRecordsForPeerPushInitiation(
+ tx,
+ { onlyState: "nonfinal" },
+ async (pi) => {
const opId = TaskIdentifiers.forPeerPushPaymentInitiation(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@@ -449,7 +604,28 @@ async function gatherPeerPushInitiationPending(
retryInfo: retryRecord?.retryInfo,
pursePub: pi.pursePub,
});
- });
+ },
+ );
+}
+
+export async function iterRecordsForPeerPushCredit(
+ tx: GetReadOnlyAccess<{
+ peerPushPaymentIncoming: typeof WalletStoresV1.peerPushPaymentIncoming;
+ }>,
+ filter: TransactionRecordFilter,
+ f: (r: PeerPushPaymentIncomingRecord) => Promise<void>,
+): Promise<void> {
+ if (filter.onlyState === "nonfinal") {
+ const keyRange = GlobalIDB.KeyRange.bound(
+ PeerPushPaymentIncomingStatus.PendingMerge,
+ PeerPushPaymentIncomingStatus.PendingWithdrawing,
+ );
+ await tx.peerPushPaymentIncoming.indexes.byStatus
+ .iter(keyRange)
+ .forEachAsync(f);
+ } else {
+ await tx.peerPushPaymentIncoming.indexes.byStatus.iter().forEachAsync(f);
+ }
}
async function gatherPeerPushCreditPending(
@@ -465,9 +641,10 @@ async function gatherPeerPushCreditPending(
PeerPushPaymentIncomingStatus.PendingMerge,
PeerPushPaymentIncomingStatus.PendingWithdrawing,
);
- await tx.peerPushPaymentIncoming.indexes.byStatus
- .iter(keyRange)
- .forEachAsync(async (pi) => {
+ await iterRecordsForPeerPushCredit(
+ tx,
+ { onlyState: "nonfinal" },
+ async (pi) => {
const opId = TaskIdentifiers.forPeerPushCredit(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@@ -479,7 +656,8 @@ async function gatherPeerPushCreditPending(
retryInfo: retryRecord?.retryInfo,
peerPushPaymentIncomingId: pi.peerPushPaymentIncomingId,
});
- });
+ },
+ );
}
export async function getPendingOperations(
@@ -513,7 +691,7 @@ export async function getPendingOperations(
await gatherRefreshPending(ws, tx, now, resp);
await gatherWithdrawalPending(ws, tx, now, resp);
await gatherDepositPending(ws, tx, now, resp);
- await gatherTipPending(ws, tx, now, resp);
+ await gatherRewardPending(ws, tx, now, resp);
await gatherPurchasePending(ws, tx, now, resp);
await gatherRecoupPending(ws, tx, now, resp);
await gatherBackupPending(ws, tx, now, resp);
diff --git a/packages/taler-wallet-core/src/operations/testing.ts b/packages/taler-wallet-core/src/operations/testing.ts
index ea373e914..3090549d5 100644
--- a/packages/taler-wallet-core/src/operations/testing.ts
+++ b/packages/taler-wallet-core/src/operations/testing.ts
@@ -472,12 +472,15 @@ export async function waitUntilDone(ws: InternalWalletState): Promise<void> {
p = openPromise();
const txs = await getTransactions(ws, {
includeRefreshes: true,
+ filterByState: "nonfinal",
});
let finished = true;
for (const tx of txs.transactions) {
switch (tx.txState.major) {
case TransactionMajorState.Pending:
case TransactionMajorState.Aborting:
+ case TransactionMajorState.Suspended:
+ case TransactionMajorState.SuspendedAborting:
finished = false;
logger.info(
`continuing waiting, ${tx.transactionId} in ${tx.txState.major}(${tx.txState.minor})`,
diff --git a/packages/taler-wallet-core/src/operations/transactions.ts b/packages/taler-wallet-core/src/operations/transactions.ts
index a16809b36..af04cb161 100644
--- a/packages/taler-wallet-core/src/operations/transactions.ts
+++ b/packages/taler-wallet-core/src/operations/transactions.ts
@@ -36,6 +36,7 @@ import {
TransactionByIdRequest,
TransactionIdStr,
TransactionMajorState,
+ TransactionRecordFilter,
TransactionsRequest,
TransactionsResponse,
TransactionState,
@@ -153,6 +154,7 @@ import {
resumePeerPushDebitTransaction,
abortPeerPushDebitTransaction,
} from "./pay-peer-push-debit.js";
+import { iterRecordsForDeposit, iterRecordsForPeerPullDebit, iterRecordsForPeerPullInitiation, iterRecordsForPeerPushCredit, iterRecordsForPeerPushInitiation, iterRecordsForPurchase, iterRecordsForRefresh, iterRecordsForRefund, iterRecordsForReward, iterRecordsForWithdrawal } from "./pending.js";
const logger = new Logger("taler-wallet-core:transactions.ts");
@@ -929,6 +931,11 @@ export async function getTransactions(
): Promise<TransactionsResponse> {
const transactions: Transaction[] = [];
+ const filter: TransactionRecordFilter = {};
+ if (transactionsRequest?.filterByState) {
+ filter.onlyState = transactionsRequest.filterByState;
+ }
+
await ws.db
.mktx((x) => [
x.coins,
@@ -952,7 +959,7 @@ export async function getTransactions(
x.refundGroups,
])
.runReadOnly(async (tx) => {
- tx.peerPushPaymentInitiations.iter().forEachAsync(async (pi) => {
+ await iterRecordsForPeerPushInitiation(tx, filter, async (pi) => {
const amount = Amounts.parseOrThrow(pi.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
@@ -968,7 +975,7 @@ export async function getTransactions(
);
});
- tx.peerPullPaymentIncoming.iter().forEachAsync(async (pi) => {
+ await iterRecordsForPeerPullDebit(tx, filter, async (pi) => {
const amount = Amounts.parseOrThrow(pi.contractTerms.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
return;
@@ -986,7 +993,7 @@ export async function getTransactions(
transactions.push(buildTransactionForPullPaymentDebit(pi));
});
- tx.peerPushPaymentIncoming.iter().forEachAsync(async (pi) => {
+ await iterRecordsForPeerPushCredit(tx, filter, async (pi) => {
if (!pi.currency) {
// Legacy transaction
return;
@@ -1026,8 +1033,8 @@ export async function getTransactions(
),
);
});
-
- tx.peerPullPaymentInitiations.iter().forEachAsync(async (pi) => {
+
+ await iterRecordsForPeerPullInitiation(tx, filter, async (pi) => {
const currency = Amounts.currencyOf(pi.amount);
if (shouldSkipCurrency(transactionsRequest, currency)) {
return;
@@ -1060,7 +1067,7 @@ export async function getTransactions(
);
});
- tx.refundGroups.iter().forEachAsync(async (refundGroup) => {
+ await iterRecordsForRefund(tx, filter, async (refundGroup) => {
const currency = Amounts.currencyOf(refundGroup.amountRaw);
if (shouldSkipCurrency(transactionsRequest, currency)) {
return;
@@ -1071,8 +1078,8 @@ export async function getTransactions(
);
transactions.push(buildTransactionForRefund(refundGroup, contractData));
});
-
- tx.refreshGroups.iter().forEachAsync(async (rg) => {
+
+ await iterRecordsForRefresh(tx, filter, async (rg) => {
if (shouldSkipCurrency(transactionsRequest, rg.currency)) {
return;
}
@@ -1092,7 +1099,7 @@ export async function getTransactions(
}
});
- tx.withdrawalGroups.iter().forEachAsync(async (wsr) => {
+ await iterRecordsForWithdrawal(tx, filter ,async (wsr) => {
if (
shouldSkipCurrency(
transactionsRequest,
@@ -1146,7 +1153,7 @@ export async function getTransactions(
}
});
- tx.depositGroups.iter().forEachAsync(async (dg) => {
+ await iterRecordsForDeposit(tx, filter, async (dg) => {
const amount = Amounts.parseOrThrow(dg.contractTermsRaw.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
return;
@@ -1157,7 +1164,7 @@ export async function getTransactions(
transactions.push(buildTransactionForDeposit(dg, retryRecord));
});
- tx.purchases.iter().forEachAsync(async (purchase) => {
+ await iterRecordsForPurchase(tx, filter, async (purchase) => {
const download = purchase.download;
if (!download) {
return;
@@ -1200,7 +1207,7 @@ export async function getTransactions(
);
});
- tx.rewards.iter().forEachAsync(async (tipRecord) => {
+ await iterRecordsForReward(tx, filter, async (tipRecord) => {
if (
shouldSkipCurrency(
transactionsRequest,
diff --git a/packages/taler-wallet-core/src/util/query.ts b/packages/taler-wallet-core/src/util/query.ts
index 1de1e9a0d..527cbdf63 100644
--- a/packages/taler-wallet-core/src/util/query.ts
+++ b/packages/taler-wallet-core/src/util/query.ts
@@ -338,7 +338,7 @@ interface IndexReadOnlyAccessor<RecordType> {
iter(query?: IDBKeyRange | IDBValidKey): ResultStream<RecordType>;
get(query: IDBValidKey): Promise<RecordType | undefined>;
getAll(
- query: IDBKeyRange | IDBValidKey,
+ query?: IDBKeyRange | IDBValidKey,
count?: number,
): Promise<RecordType[]>;
}
@@ -351,7 +351,7 @@ interface IndexReadWriteAccessor<RecordType> {
iter(query: IDBKeyRange | IDBValidKey): ResultStream<RecordType>;
get(query: IDBValidKey): Promise<RecordType | undefined>;
getAll(
- query: IDBKeyRange | IDBValidKey,
+ query?: IDBKeyRange | IDBValidKey,
count?: number,
): Promise<RecordType[]>;
}
diff --git a/packages/taler-wallet-core/src/wallet.ts b/packages/taler-wallet-core/src/wallet.ts
index 796a96f14..8cd9bb8c3 100644
--- a/packages/taler-wallet-core/src/wallet.ts
+++ b/packages/taler-wallet-core/src/wallet.ts
@@ -479,6 +479,7 @@ async function runTaskLoop(
// Wait until either the timeout, or we are notified (via the latch)
// that more work might be available.
await Promise.race([timeout, ws.workAvailable.wait()]);
+ logger.trace(`done waiting for available work`);
} else {
logger.trace(
`running ${pending.pendingOperations.length} pending operations`,
diff --git a/packages/taler-wallet-embedded/build.mjs b/packages/taler-wallet-embedded/build.mjs
index 28351e6e5..233660af1 100755
--- a/packages/taler-wallet-embedded/build.mjs
+++ b/packages/taler-wallet-embedded/build.mjs
@@ -51,7 +51,7 @@ export const buildConfig = {
target: [
'es2020'
],
- external: ["os", "std"],
+ external: ["os", "std", "better-sqlite3"],
format: 'esm',
platform: 'neutral',
mainFields: ["module", "main"],
diff --git a/packages/taler-wallet-embedded/src/wallet-qjs.ts b/packages/taler-wallet-embedded/src/wallet-qjs.ts
index e475f9542..5e2f1e0a4 100644
--- a/packages/taler-wallet-embedded/src/wallet-qjs.ts
+++ b/packages/taler-wallet-embedded/src/wallet-qjs.ts
@@ -27,9 +27,9 @@ import {
CoreApiResponseSuccess,
getErrorDetailFromException,
InitRequest,
+ j2s,
Logger,
setGlobalLogLevelFromString,
- setPRNG,
WalletNotification,
} from "@gnu-taler/taler-util";
import { createPlatformHttpLib } from "@gnu-taler/taler-util/http";
@@ -47,20 +47,11 @@ import {
getRecoveryStartState,
discoverPolicies,
mergeDiscoveryAggregate,
- ReducerState,
} from "@gnu-taler/anastasis-core";
import { userIdentifierDerive } from "@gnu-taler/anastasis-core/lib/crypto.js";
setGlobalLogLevelFromString("trace");
-setPRNG(function (x: Uint8Array, n: number) {
- // @ts-ignore
- const va = globalThis._tart.randomBytes(n);
- const v = new Uint8Array(va);
- for (let i = 0; i < n; i++) x[i] = v[i];
- for (let i = 0; i < v.length; i++) v[i] = 0;
-});
-
const logger = new Logger("taler-wallet-embedded/index.ts");
/**
@@ -222,6 +213,8 @@ async function handleAnastasisRequest(
cursor: discoverRes.cursor,
},
});
+ default:
+ throw Error("unsupported anastasis operation");
}
}
@@ -295,10 +288,10 @@ export async function testWithGv() {
});
}
-export async function testWithLocal() {
+export async function testWithLocal(path: string) {
console.log("running local test");
const w = await createNativeWalletHost2({
- persistentStoragePath: "walletdb.json",
+ persistentStoragePath: path ?? "walletdb.json",
config: {
features: {
allowHttp: true,
@@ -310,7 +303,7 @@ export async function testWithLocal() {
skipDefaults: true,
});
console.log("initialized wallet");
- await w.wallet.client.call(WalletApiOperation.RunIntegrationTestV2, {
+ await w.wallet.client.call(WalletApiOperation.RunIntegrationTest, {
amountToSpend: "TESTKUDOS:1",
amountToWithdraw: "TESTKUDOS:3",
bankAccessApiBaseUrl: "http://localhost:8082/taler-bank-access/",
@@ -323,6 +316,7 @@ export async function testWithLocal() {
});
console.log("done with task loop");
w.wallet.stop();
+ console.log("DB stats:", j2s(w.getDbStats()));
}
export async function testArgon2id() {
@@ -357,4 +351,4 @@ globalThis.testArgon2id = testArgon2id;
// @ts-ignore
globalThis.testReduceAction = reduceAction;
// @ts-ignore
-globalThis.testDiscoverPolicies = discoverPolicies; \ No newline at end of file
+globalThis.testDiscoverPolicies = discoverPolicies;
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index f7fd2f134..7fd6f7619 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1,4 +1,4 @@
-lockfileVersion: '6.1'
+lockfileVersion: '6.0'
settings:
autoInstallPeers: true
@@ -314,27 +314,31 @@ importers:
packages/idb-bridge:
dependencies:
tslib:
- specifier: ^2.5.3
- version: 2.5.3
+ specifier: ^2.6.0
+ version: 2.6.0
+ optionalDependencies:
+ better-sqlite3:
+ specifier: ^8.4.0
+ version: 8.4.0
devDependencies:
+ '@types/better-sqlite3':
+ specifier: ^7.6.4
+ version: 7.6.4
'@types/node':
- specifier: ^18.11.17
- version: 18.11.17
+ specifier: ^20.4.1
+ version: 20.4.1
ava:
- specifier: ^4.3.3
- version: 4.3.3(@ava/typescript@4.0.0)
- esm:
- specifier: ^3.2.25
- version: 3.2.25
+ specifier: ^5.3.1
+ version: 5.3.1
prettier:
specifier: ^2.8.8
version: 2.8.8
rimraf:
- specifier: ^3.0.2
- version: 3.0.2
+ specifier: ^5.0.1
+ version: 5.0.1
typescript:
- specifier: ^5.1.3
- version: 5.1.3
+ specifier: ^5.1.6
+ version: 5.1.6
packages/merchant-backend-ui:
dependencies:
@@ -413,7 +417,7 @@ importers:
version: 1.0.14
ts-node:
specifier: ^10.9.1
- version: 10.9.1(@types/node@18.11.17)(typescript@5.1.3)
+ version: 10.9.1(@types/node@20.4.1)(typescript@5.1.3)
tslib:
specifier: 2.5.3
version: 2.5.3
@@ -4934,6 +4938,18 @@ packages:
resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==}
dev: true
+ /@isaacs/cliui@8.0.2:
+ resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==}
+ engines: {node: '>=12'}
+ dependencies:
+ string-width: 5.1.2
+ string-width-cjs: /string-width@4.2.3
+ strip-ansi: 7.0.1
+ strip-ansi-cjs: /strip-ansi@6.0.1
+ wrap-ansi: 8.1.0
+ wrap-ansi-cjs: /wrap-ansi@7.0.0
+ dev: true
+
/@istanbuljs/load-nyc-config@1.1.0:
resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==}
engines: {node: '>=8'}
@@ -5418,6 +5434,13 @@ packages:
node-gyp-build: 4.5.0
dev: true
+ /@pkgjs/parseargs@0.11.0:
+ resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==}
+ engines: {node: '>=14'}
+ requiresBuild: true
+ dev: true
+ optional: true
+
/@polka/url@1.0.0-next.21:
resolution: {integrity: sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==}
dev: true
@@ -5580,6 +5603,12 @@ packages:
resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==}
dev: true
+ /@types/better-sqlite3@7.6.4:
+ resolution: {integrity: sha512-dzrRZCYPXIXfSR1/surNbJ/grU3scTaygS0OMzjlGf71i9sc2fGyHPXXiXmEvNIoE0cGwsanEFMVJxPXmco9Eg==}
+ dependencies:
+ '@types/node': 20.4.1
+ dev: true
+
/@types/body-parser@1.19.2:
resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==}
dependencies:
@@ -5707,6 +5736,10 @@ packages:
/@types/node@18.11.17:
resolution: {integrity: sha512-HJSUJmni4BeDHhfzn6nF0sVmd1SMezP7/4F0Lq+aXzmp2xm9O7WXrUtHW/CHlYVtZUbByEvWidHqRtcJXGF2Ng==}
+ /@types/node@20.4.1:
+ resolution: {integrity: sha512-JIzsAvJeA/5iY6Y/OxZbv1lUcc8dNSE77lb2gnBH+/PJ3lFR1Ccvgwl5JWnHAkNHcRsT0TbpVOsiMKZ1F/yyJg==}
+ dev: true
+
/@types/parse-json@4.0.0:
resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==}
dev: true
@@ -6323,7 +6356,7 @@ packages:
engines: {node: '>=14.15.0'}
dependencies:
js-yaml: 3.14.1
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
/@zkochan/js-yaml@0.0.6:
@@ -6388,6 +6421,7 @@ packages:
/acorn@6.4.2:
resolution: {integrity: sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==}
engines: {node: '>=0.4.0'}
+ hasBin: true
dev: true
/acorn@7.4.1:
@@ -6553,6 +6587,7 @@ packages:
/anymatch@2.0.0:
resolution: {integrity: sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==}
+ requiresBuild: true
dependencies:
micromatch: 3.1.10
normalize-path: 2.1.1
@@ -6755,6 +6790,7 @@ packages:
/async-each@1.0.3:
resolution: {integrity: sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==}
+ requiresBuild: true
dev: true
optional: true
@@ -6855,6 +6891,63 @@ packages:
- supports-color
dev: true
+ /ava@5.3.1:
+ resolution: {integrity: sha512-Scv9a4gMOXB6+ni4toLuhAm9KYWEjsgBglJl+kMGI5+IVDt120CCDZyB5HNU9DjmLI2t4I0GbnxGLmmRfGTJGg==}
+ engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'}
+ hasBin: true
+ peerDependencies:
+ '@ava/typescript': '*'
+ peerDependenciesMeta:
+ '@ava/typescript':
+ optional: true
+ dependencies:
+ acorn: 8.8.2
+ acorn-walk: 8.2.0
+ ansi-styles: 6.2.1
+ arrgv: 1.0.2
+ arrify: 3.0.0
+ callsites: 4.0.0
+ cbor: 8.1.0
+ chalk: 5.3.0
+ chokidar: 3.5.3
+ chunkd: 2.0.1
+ ci-info: 3.8.0
+ ci-parallel-vars: 1.0.1
+ clean-yaml-object: 0.1.0
+ cli-truncate: 3.1.0
+ code-excerpt: 4.0.0
+ common-path-prefix: 3.0.0
+ concordance: 5.0.4
+ currently-unhandled: 0.4.1
+ debug: 4.3.4
+ emittery: 1.0.1
+ figures: 5.0.0
+ globby: 13.2.2
+ ignore-by-default: 2.1.0
+ indent-string: 5.0.0
+ is-error: 2.2.2
+ is-plain-object: 5.0.0
+ is-promise: 4.0.0
+ matcher: 5.0.0
+ mem: 9.0.2
+ ms: 2.1.3
+ p-event: 5.0.1
+ p-map: 5.5.0
+ picomatch: 2.3.1
+ pkg-conf: 4.0.0
+ plur: 5.1.0
+ pretty-ms: 8.0.0
+ resolve-cwd: 3.0.0
+ stack-utils: 2.0.6
+ strip-ansi: 7.0.1
+ supertap: 3.0.1
+ temp-dir: 3.0.0
+ write-file-atomic: 5.0.1
+ yargs: 17.7.2
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
/aws-sign2@0.7.0:
resolution: {integrity: sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==}
dev: true
@@ -7097,7 +7190,6 @@ packages:
/base64-js@1.5.1:
resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
- dev: true
/base@0.11.2:
resolution: {integrity: sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==}
@@ -7122,6 +7214,15 @@ packages:
tweetnacl: 0.14.5
dev: true
+ /better-sqlite3@8.4.0:
+ resolution: {integrity: sha512-NmsNW1CQvqMszu/CFAJ3pLct6NEFlNfuGM6vw72KHkjOD1UDnL96XNN1BMQc1hiHo8vE2GbOWQYIpZ+YM5wrZw==}
+ requiresBuild: true
+ dependencies:
+ bindings: 1.5.0
+ prebuild-install: 7.1.1
+ dev: false
+ optional: true
+
/big-integer@1.6.51:
resolution: {integrity: sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==}
engines: {node: '>=0.6'}
@@ -7138,6 +7239,7 @@ packages:
/binary-extensions@1.13.1:
resolution: {integrity: sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==}
engines: {node: '>=0.10.0'}
+ requiresBuild: true
dev: true
optional: true
@@ -7150,7 +7252,6 @@ packages:
requiresBuild: true
dependencies:
file-uri-to-path: 1.0.0
- dev: true
optional: true
/bl@4.1.0:
@@ -7159,7 +7260,6 @@ packages:
buffer: 5.7.1
inherits: 2.0.4
readable-stream: 3.6.0
- dev: true
/bluebird@3.7.2:
resolution: {integrity: sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==}
@@ -7367,7 +7467,6 @@ packages:
dependencies:
base64-js: 1.5.1
ieee754: 1.2.1
- dev: true
/builtin-modules@3.3.0:
resolution: {integrity: sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==}
@@ -7581,7 +7680,7 @@ packages:
resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==}
dependencies:
pascal-case: 3.1.2
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
/camelcase-css@2.0.1:
@@ -7689,12 +7788,19 @@ packages:
engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
dev: true
+ /chalk@5.3.0:
+ resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==}
+ engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
+ dev: true
+
/check-error@1.0.2:
resolution: {integrity: sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==}
dev: true
/chokidar@2.1.8:
resolution: {integrity: sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==}
+ deprecated: Chokidar 2 does not receive security updates since 2019. Upgrade to chokidar 3 with 15x fewer dependencies
+ requiresBuild: true
dependencies:
anymatch: 2.0.0
async-each: 1.0.3
@@ -7730,7 +7836,6 @@ packages:
/chownr@1.1.4:
resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==}
- dev: true
/chownr@2.0.0:
resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==}
@@ -7758,6 +7863,11 @@ packages:
resolution: {integrity: sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw==}
dev: true
+ /ci-info@3.8.0:
+ resolution: {integrity: sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==}
+ engines: {node: '>=8'}
+ dev: true
+
/ci-parallel-vars@1.0.1:
resolution: {integrity: sha512-uvzpYrpmidaoxvIQHM+rKSrigjOe9feHYbw4uOI2gdfe1C3xIlxO+kVXq83WQWNniTf8bAxVpy+cQeFQsMERKg==}
dev: true
@@ -8691,6 +8801,15 @@ packages:
mimic-response: 1.0.1
dev: true
+ /decompress-response@6.0.0:
+ resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==}
+ engines: {node: '>=10'}
+ requiresBuild: true
+ dependencies:
+ mimic-response: 3.1.0
+ dev: false
+ optional: true
+
/deep-eql@3.0.1:
resolution: {integrity: sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==}
engines: {node: '>=0.12'}
@@ -8701,7 +8820,7 @@ packages:
/deep-extend@0.6.0:
resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==}
engines: {node: '>=4.0.0'}
- dev: true
+ requiresBuild: true
/deep-is@0.1.4:
resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==}
@@ -8827,6 +8946,13 @@ packages:
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
dev: true
+ /detect-libc@2.0.2:
+ resolution: {integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw==}
+ engines: {node: '>=8'}
+ requiresBuild: true
+ dev: false
+ optional: true
+
/detect-node@2.1.0:
resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==}
dev: true
@@ -8953,7 +9079,7 @@ packages:
resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==}
dependencies:
no-case: 3.0.4
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
/dot-prop@5.3.0:
@@ -9044,6 +9170,11 @@ packages:
engines: {node: '>=12'}
dev: true
+ /emittery@1.0.1:
+ resolution: {integrity: sha512-2ID6FdrMD9KDLldGesP6317G78K7km/kMcwItRtVFva7I/cSEOIaLpewaUb+YLXVwdAp3Ctfxh/V5zIl1sj7dQ==}
+ engines: {node: '>=14.16'}
+ dev: true
+
/emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
dev: true
@@ -9077,7 +9208,6 @@ packages:
resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==}
dependencies:
once: 1.4.0
- dev: true
/enhanced-resolve@4.5.0:
resolution: {integrity: sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg==}
@@ -10070,6 +10200,13 @@ packages:
- supports-color
dev: true
+ /expand-template@2.0.3:
+ resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==}
+ engines: {node: '>=6'}
+ requiresBuild: true
+ dev: false
+ optional: true
+
/express@4.18.2:
resolution: {integrity: sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==}
engines: {node: '>= 0.10.0'}
@@ -10178,6 +10315,17 @@ packages:
micromatch: 4.0.5
dev: true
+ /fast-glob@3.3.1:
+ resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==}
+ engines: {node: '>=8.6.0'}
+ dependencies:
+ '@nodelib/fs.stat': 2.0.5
+ '@nodelib/fs.walk': 1.2.8
+ glob-parent: 5.1.2
+ merge2: 1.4.1
+ micromatch: 4.0.5
+ dev: true
+
/fast-json-stable-stringify@2.1.0:
resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==}
dev: true
@@ -10221,6 +10369,14 @@ packages:
is-unicode-supported: 1.3.0
dev: true
+ /figures@5.0.0:
+ resolution: {integrity: sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==}
+ engines: {node: '>=14'}
+ dependencies:
+ escape-string-regexp: 5.0.0
+ is-unicode-supported: 1.3.0
+ dev: true
+
/file-entry-cache@6.0.1:
resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
engines: {node: ^10.12.0 || >=12.0.0}
@@ -10253,7 +10409,6 @@ packages:
/file-uri-to-path@1.0.0:
resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==}
requiresBuild: true
- dev: true
optional: true
/filelist@1.0.4:
@@ -10393,6 +10548,14 @@ packages:
signal-exit: 3.0.7
dev: true
+ /foreground-child@3.1.1:
+ resolution: {integrity: sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==}
+ engines: {node: '>=14'}
+ dependencies:
+ cross-spawn: 7.0.3
+ signal-exit: 4.1.0
+ dev: true
+
/forever-agent@0.6.1:
resolution: {integrity: sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==}
dev: true
@@ -10476,7 +10639,6 @@ packages:
/fs-constants@1.0.0:
resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==}
- dev: true
/fs-extra@10.1.0:
resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==}
@@ -10664,6 +10826,12 @@ packages:
encoding: 0.1.13
dev: true
+ /github-from-package@0.0.0:
+ resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==}
+ requiresBuild: true
+ dev: false
+ optional: true
+
/gittar@0.1.1:
resolution: {integrity: sha512-p+XuqWJpW9ahUuNTptqeFjudFq31o6Jd+maMBarkMAR5U3K9c7zJB4sQ4BV8mIqrTOV29TtqikDhnZfCD4XNfQ==}
engines: {node: '>=4'}
@@ -10674,6 +10842,7 @@ packages:
/glob-parent@3.1.0:
resolution: {integrity: sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==}
+ requiresBuild: true
dependencies:
is-glob: 3.1.0
path-dirname: 1.0.2
@@ -10692,6 +10861,18 @@ packages:
dependencies:
is-glob: 4.0.3
+ /glob@10.3.3:
+ resolution: {integrity: sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==}
+ engines: {node: '>=16 || 14 >=14.17'}
+ hasBin: true
+ dependencies:
+ foreground-child: 3.1.1
+ jackspeak: 2.2.3
+ minimatch: 9.0.1
+ minipass: 7.0.2
+ path-scurry: 1.10.1
+ dev: true
+
/glob@7.1.4:
resolution: {integrity: sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==}
dependencies:
@@ -10793,6 +10974,17 @@ packages:
slash: 4.0.0
dev: true
+ /globby@13.2.2:
+ resolution: {integrity: sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ dependencies:
+ dir-glob: 3.0.1
+ fast-glob: 3.3.1
+ ignore: 5.2.4
+ merge2: 1.4.1
+ slash: 4.0.0
+ dev: true
+
/got@9.6.0:
resolution: {integrity: sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==}
engines: {node: '>=8.6'}
@@ -11233,7 +11425,6 @@ packages:
/ieee754@1.2.1:
resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==}
- dev: true
/iferr@0.1.5:
resolution: {integrity: sha512-DUNFN5j7Tln0D+TxzloUjKB+CtVu6myn0JEFak6dG18mNt9YkQ6lzGCdafwofISZ1lLF3xRHJ98VKy9ynkcFaA==}
@@ -11326,7 +11517,7 @@ packages:
/ini@1.3.8:
resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==}
- dev: true
+ requiresBuild: true
/ini@2.0.0:
resolution: {integrity: sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==}
@@ -11401,6 +11592,7 @@ packages:
/is-binary-path@1.0.1:
resolution: {integrity: sha512-9fRVlXc0uCxEDj1nQzaWONSpbTfx0FmJfzHF7pwlI8DkWGoHBBea4Pg5Ky0ojwwxQmnSifgbKkI06Qv0Ljgj+Q==}
engines: {node: '>=0.10.0'}
+ requiresBuild: true
dependencies:
binary-extensions: 1.13.1
dev: true
@@ -11533,6 +11725,7 @@ packages:
/is-glob@3.1.0:
resolution: {integrity: sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==}
engines: {node: '>=0.10.0'}
+ requiresBuild: true
dependencies:
is-extglob: 2.1.1
dev: true
@@ -11820,6 +12013,15 @@ packages:
istanbul-lib-report: 3.0.0
dev: true
+ /jackspeak@2.2.3:
+ resolution: {integrity: sha512-pF0kfjmg8DJLxDrizHoCZGUFz4P4czQ3HyfW4BU0ffebYkzAVlBywp5zaxW/TM+r0sGbmrQdi8EQQVTJFxnGsQ==}
+ engines: {node: '>=14'}
+ dependencies:
+ '@isaacs/cliui': 8.0.2
+ optionalDependencies:
+ '@pkgjs/parseargs': 0.11.0
+ dev: true
+
/jake@10.8.5:
resolution: {integrity: sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==}
engines: {node: '>=10'}
@@ -12252,7 +12454,7 @@ packages:
/lower-case@2.0.2:
resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==}
dependencies:
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
/lowercase-keys@1.0.1:
@@ -12265,6 +12467,11 @@ packages:
engines: {node: '>=8'}
dev: true
+ /lru-cache@10.0.0:
+ resolution: {integrity: sha512-svTf/fzsKHffP42sujkO/Rjs37BCIsQVRCeNYIm9WN8rgT7ffoUnRtZCqU+6BqcSBdv8gwJeTz8knJpgACeQMw==}
+ engines: {node: 14 || >=16.14}
+ dev: true
+
/lru-cache@4.1.5:
resolution: {integrity: sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==}
dependencies:
@@ -12283,7 +12490,6 @@ packages:
engines: {node: '>=10'}
dependencies:
yallist: 4.0.0
- dev: true
/lunr@2.3.9:
resolution: {integrity: sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==}
@@ -12490,6 +12696,13 @@ packages:
engines: {node: '>=4'}
dev: true
+ /mimic-response@3.1.0:
+ resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==}
+ engines: {node: '>=10'}
+ requiresBuild: true
+ dev: false
+ optional: true
+
/mini-css-extract-plugin@1.6.2(webpack@4.46.0):
resolution: {integrity: sha512-WhDvO3SjGm40oV5y26GjMJYjd2UMqrLAGKy5YS2/3QKJy2F7jgynuHTir/tgUUOiNQu5saXHdc8reo7YuhhT4Q==}
engines: {node: '>= 10.13.0'}
@@ -12554,7 +12767,6 @@ packages:
/minimist@1.2.7:
resolution: {integrity: sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==}
- dev: true
/minipass-collect@1.0.2:
resolution: {integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==}
@@ -12591,6 +12803,11 @@ packages:
yallist: 4.0.0
dev: true
+ /minipass@7.0.2:
+ resolution: {integrity: sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==}
+ engines: {node: '>=16 || 14 >=14.17'}
+ dev: true
+
/minizlib@1.3.3:
resolution: {integrity: sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==}
dependencies:
@@ -12629,6 +12846,12 @@ packages:
is-extendable: 1.0.1
dev: true
+ /mkdirp-classic@0.5.3:
+ resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==}
+ requiresBuild: true
+ dev: false
+ optional: true
+
/mkdirp@0.5.6:
resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==}
dependencies:
@@ -12758,6 +12981,12 @@ packages:
- supports-color
dev: true
+ /napi-build-utils@1.0.2:
+ resolution: {integrity: sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==}
+ requiresBuild: true
+ dev: false
+ optional: true
+
/native-url@0.3.4:
resolution: {integrity: sha512-6iM8R99ze45ivyH8vybJ7X0yekIcPf5GgLV5K0ENCbmRcaRIDoj37BC8iLEmaaBfqqb8enuZ5p0uhY+lVAbAcA==}
dependencies:
@@ -12787,9 +13016,18 @@ packages:
resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==}
dependencies:
lower-case: 2.0.2
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
+ /node-abi@3.45.0:
+ resolution: {integrity: sha512-iwXuFrMAcFVi/ZoZiqq8BzAdsLw9kxDfTC0HMyjXfSL/6CSDAGD5UmR7azrAgWV1zKYq7dUUMj4owusBWKLsiQ==}
+ engines: {node: '>=10'}
+ requiresBuild: true
+ dependencies:
+ semver: 7.3.8
+ dev: false
+ optional: true
+
/node-addon-api@3.2.1:
resolution: {integrity: sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==}
dev: true
@@ -12868,6 +13106,7 @@ packages:
/normalize-path@2.1.1:
resolution: {integrity: sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==}
engines: {node: '>=0.10.0'}
+ requiresBuild: true
dependencies:
remove-trailing-separator: 1.1.0
dev: true
@@ -13368,7 +13607,7 @@ packages:
resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==}
dependencies:
dot-case: 3.0.4
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
/parent-module@1.0.1:
@@ -13411,6 +13650,11 @@ packages:
engines: {node: '>=6'}
dev: true
+ /parse-ms@3.0.0:
+ resolution: {integrity: sha512-Tpb8Z7r7XbbtBTrM9UhpkzzaMrqA2VXMT3YChzYltwV3P3pM6t8wl7TvpMnSTosz1aQAdVib7kdoys7vYOPerw==}
+ engines: {node: '>=12'}
+ dev: true
+
/parse5@4.0.0:
resolution: {integrity: sha512-VrZ7eOd3T1Fk4XWNXMgiGBK/z0MG48BWG2uQNU4I72fkQuKUTZpl+u9k+CxEG0twMVzSmXEEz12z5Fnw1jIQFA==}
dev: true
@@ -13428,7 +13672,7 @@ packages:
resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==}
dependencies:
no-case: 3.0.4
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
/pascalcase@0.1.1:
@@ -13442,6 +13686,7 @@ packages:
/path-dirname@1.0.2:
resolution: {integrity: sha512-ALzNPpyNq9AqXMBjeymIjFDAkAFH06mHJH/cSBHAgU0s4vfpBn6b2nf8tiRLvagKD8RbTpq2FKTBg7cl9l3c7Q==}
+ requiresBuild: true
dev: true
optional: true
@@ -13477,6 +13722,14 @@ packages:
/path-parse@1.0.7:
resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
+ /path-scurry@1.10.1:
+ resolution: {integrity: sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==}
+ engines: {node: '>=16 || 14 >=14.17'}
+ dependencies:
+ lru-cache: 10.0.0
+ minipass: 7.0.2
+ dev: true
+
/path-to-regexp@0.1.7:
resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==}
dev: true
@@ -14453,6 +14706,27 @@ packages:
/preact@10.11.3:
resolution: {integrity: sha512-eY93IVpod/zG3uMF22Unl8h9KkrcKIRs2EGar8hwLZZDU1lkjph303V9HZBwufh2s736U6VXuhD109LYqPoffg==}
+ /prebuild-install@7.1.1:
+ resolution: {integrity: sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==}
+ engines: {node: '>=10'}
+ hasBin: true
+ requiresBuild: true
+ dependencies:
+ detect-libc: 2.0.2
+ expand-template: 2.0.3
+ github-from-package: 0.0.0
+ minimist: 1.2.7
+ mkdirp-classic: 0.5.3
+ napi-build-utils: 1.0.2
+ node-abi: 3.45.0
+ pump: 3.0.0
+ rc: 1.2.8
+ simple-get: 4.0.1
+ tar-fs: 2.1.1
+ tunnel-agent: 0.6.0
+ dev: false
+ optional: true
+
/prelude-ls@1.1.2:
resolution: {integrity: sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==}
engines: {node: '>= 0.8.0'}
@@ -14513,6 +14787,13 @@ packages:
parse-ms: 2.1.0
dev: true
+ /pretty-ms@8.0.0:
+ resolution: {integrity: sha512-ASJqOugUF1bbzI35STMBUpZqdfYKlJugy6JBziGi2EE+AL5JPJGSzvpeVXojxrr0ViUYoToUjb5kjSEGf7Y83Q==}
+ engines: {node: '>=14.16'}
+ dependencies:
+ parse-ms: 3.0.0
+ dev: true
+
/process-nextick-args@2.0.1:
resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
dev: true
@@ -14626,7 +14907,6 @@ packages:
dependencies:
end-of-stream: 1.4.4
once: 1.4.0
- dev: true
/pumpify@1.5.1:
resolution: {integrity: sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==}
@@ -14736,12 +15016,12 @@ packages:
/rc@1.2.8:
resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==}
+ hasBin: true
dependencies:
deep-extend: 0.6.0
ini: 1.3.8
minimist: 1.2.7
strip-json-comments: 2.0.1
- dev: true
/react-dom@18.2.0(react@18.2.0):
resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==}
@@ -14798,11 +15078,11 @@ packages:
inherits: 2.0.4
string_decoder: 1.3.0
util-deprecate: 1.0.2
- dev: true
/readdirp@2.2.1:
resolution: {integrity: sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==}
engines: {node: '>=0.10'}
+ requiresBuild: true
dependencies:
graceful-fs: 4.2.10
micromatch: 3.1.10
@@ -14928,6 +15208,7 @@ packages:
/remove-trailing-separator@1.1.0:
resolution: {integrity: sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==}
+ requiresBuild: true
dev: true
optional: true
@@ -15129,6 +15410,14 @@ packages:
glob: 7.2.3
dev: true
+ /rimraf@5.0.1:
+ resolution: {integrity: sha512-OfFZdwtd3lZ+XZzYP/6gTACubwFcHdLRqS9UX3UwpU2dnGQYkPFISRwvM3w9IiB2w7bW5qGo/uAwE4SmXXSKvg==}
+ engines: {node: '>=14'}
+ hasBin: true
+ dependencies:
+ glob: 10.3.3
+ dev: true
+
/ripemd160@2.0.2:
resolution: {integrity: sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==}
dependencies:
@@ -15179,7 +15468,6 @@ packages:
/safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
- dev: true
/safe-regex-test@1.0.0:
resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==}
@@ -15295,6 +15583,7 @@ packages:
/semver@5.7.1:
resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==}
+ hasBin: true
dev: true
/semver@6.3.0:
@@ -15319,7 +15608,6 @@ packages:
engines: {node: '>=10'}
dependencies:
lru-cache: 6.0.0
- dev: true
/send@0.18.0:
resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==}
@@ -15479,6 +15767,27 @@ packages:
resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
dev: true
+ /signal-exit@4.1.0:
+ resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==}
+ engines: {node: '>=14'}
+ dev: true
+
+ /simple-concat@1.0.1:
+ resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==}
+ requiresBuild: true
+ dev: false
+ optional: true
+
+ /simple-get@4.0.1:
+ resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==}
+ requiresBuild: true
+ dependencies:
+ decompress-response: 6.0.0
+ once: 1.4.0
+ simple-concat: 1.0.1
+ dev: false
+ optional: true
+
/simple-swizzle@0.2.2:
resolution: {integrity: sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==}
dependencies:
@@ -15754,6 +16063,13 @@ packages:
escape-string-regexp: 2.0.0
dev: true
+ /stack-utils@2.0.6:
+ resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ escape-string-regexp: 2.0.0
+ dev: true
+
/static-extend@0.1.2:
resolution: {integrity: sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g==}
engines: {node: '>=0.10.0'}
@@ -15862,7 +16178,6 @@ packages:
resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
dependencies:
safe-buffer: 5.2.1
- dev: true
/stringify-object@3.3.0:
resolution: {integrity: sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==}
@@ -15927,7 +16242,7 @@ packages:
/strip-json-comments@2.0.1:
resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==}
engines: {node: '>=0.10.0'}
- dev: true
+ requiresBuild: true
/strip-json-comments@3.1.1:
resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
@@ -16128,6 +16443,17 @@ packages:
engines: {node: '>=6'}
dev: true
+ /tar-fs@2.1.1:
+ resolution: {integrity: sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==}
+ requiresBuild: true
+ dependencies:
+ chownr: 1.1.4
+ mkdirp-classic: 0.5.3
+ pump: 3.0.0
+ tar-stream: 2.2.0
+ dev: false
+ optional: true
+
/tar-stream@2.2.0:
resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==}
engines: {node: '>=6'}
@@ -16137,7 +16463,6 @@ packages:
fs-constants: 1.0.0
inherits: 2.0.4
readable-stream: 3.6.0
- dev: true
/tar@4.4.19:
resolution: {integrity: sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA==}
@@ -16169,6 +16494,11 @@ packages:
engines: {node: '>=8'}
dev: true
+ /temp-dir@3.0.0:
+ resolution: {integrity: sha512-nHc6S/bwIilKHNRgK/3jlhDoIHcp45YgyiwcAk46Tr0LfEqGBVpmiAyuiuxeVE44m3mXnEeVhaipLOEWmH+Njw==}
+ engines: {node: '>=14.16'}
+ dev: true
+
/tempy@0.6.0:
resolution: {integrity: sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw==}
engines: {node: '>=10'}
@@ -16221,7 +16551,7 @@ packages:
resolution: {integrity: sha512-4GnLC0x667eJG0ewJTa6z/yXrbLGv80D9Ru6HIpCQmO+Q4PfEtBFi0ObSckqwL6VyQv/7ENJieXHo2ANmdQwgw==}
engines: {node: '>=6.0.0'}
dependencies:
- acorn: 8.8.1
+ acorn: 8.8.2
commander: 2.20.3
source-map: 0.6.1
source-map-support: 0.5.21
@@ -16232,7 +16562,7 @@ packages:
engines: {node: '>=10'}
dependencies:
'@jridgewell/source-map': 0.3.2
- acorn: 8.8.1
+ acorn: 8.8.2
commander: 2.20.3
source-map-support: 0.5.21
dev: true
@@ -16414,10 +16744,10 @@ packages:
resolution: {integrity: sha512-uivwYcQaxAucv1CzRp2n/QdYPo4ILf9VXgH19zEIjFx2EJufV16P0JtJVpYHy89DItG6Kwj2oIUjrcK5au+4tQ==}
engines: {node: '>=8'}
dependencies:
- tslib: 2.5.3
+ tslib: 2.6.1
dev: true
- /ts-node@10.9.1(@types/node@18.11.17)(typescript@5.1.3):
+ /ts-node@10.9.1(@types/node@20.4.1)(typescript@5.1.3):
resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==}
hasBin: true
peerDependencies:
@@ -16436,7 +16766,7 @@ packages:
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.3
- '@types/node': 18.11.17
+ '@types/node': 20.4.1
acorn: 8.8.1
acorn-walk: 8.2.0
arg: 4.1.3
@@ -16480,6 +16810,14 @@ packages:
/tslib@2.5.3:
resolution: {integrity: sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==}
+ /tslib@2.6.0:
+ resolution: {integrity: sha512-7At1WUettjcSRHXCyYtTselblcHl9PJFFVKiCAy/bY97+BPZXSQ2wbq0P9s8tK2G7dFQfNnlJnPAiArVBVBsfA==}
+ dev: false
+
+ /tslib@2.6.1:
+ resolution: {integrity: sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==}
+ dev: true
+
/tsutils@3.21.0(typescript@5.1.3):
resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==}
engines: {node: '>= 6'}
@@ -16498,7 +16836,6 @@ packages:
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
dependencies:
safe-buffer: 5.2.1
- dev: true
/tweetnacl@0.14.5:
resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==}
@@ -16586,6 +16923,12 @@ packages:
engines: {node: '>=14.17'}
dev: true
+ /typescript@5.1.6:
+ resolution: {integrity: sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==}
+ engines: {node: '>=14.17'}
+ hasBin: true
+ dev: true
+
/uglify-js@3.4.10:
resolution: {integrity: sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw==}
engines: {node: '>=0.8.0'}
@@ -16969,7 +17312,7 @@ packages:
resolution: {integrity: sha512-oKz9Oz9j3rUciLNfpGFjOb49/jEpXNmWdVH8Ls//zNcnLlQdTGXQQMsBbb/gR7Zl8WNLxVCq+0Hqbx3zv6twBw==}
engines: {node: '>= 10.13.0'}
dependencies:
- acorn: 8.8.1
+ acorn: 8.8.2
acorn-walk: 8.2.0
chalk: 4.1.2
commander: 7.2.0
@@ -17420,6 +17763,15 @@ packages:
strip-ansi: 6.0.1
dev: true
+ /wrap-ansi@8.1.0:
+ resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==}
+ engines: {node: '>=12'}
+ dependencies:
+ ansi-styles: 6.2.1
+ string-width: 5.1.2
+ strip-ansi: 7.0.1
+ dev: true
+
/wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
@@ -17440,6 +17792,14 @@ packages:
signal-exit: 3.0.7
dev: true
+ /write-file-atomic@5.0.1:
+ resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==}
+ engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
+ dependencies:
+ imurmurhash: 0.1.4
+ signal-exit: 4.1.0
+ dev: true
+
/ws@6.2.2:
resolution: {integrity: sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw==}
peerDependencies:
@@ -17530,7 +17890,6 @@ packages:
/yallist@4.0.0:
resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
- dev: true
/yaml@1.10.2:
resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==}
@@ -17622,6 +17981,19 @@ packages:
yargs-parser: 21.1.1
dev: true
+ /yargs@17.7.2:
+ resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==}
+ engines: {node: '>=12'}
+ dependencies:
+ cliui: 8.0.1
+ escalade: 3.1.1
+ get-caller-file: 2.0.5
+ require-directory: 2.1.1
+ string-width: 4.2.3
+ y18n: 5.0.8
+ yargs-parser: 21.1.1
+ dev: true
+
/yn@3.1.1:
resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==}
engines: {node: '>=6'}