From b2d0ad57ddf251a109d536cdc49fb6505dbdc50c Mon Sep 17 00:00:00 2001
From: Florian Dold
Date: Tue, 11 Jul 2023 15:41:48 +0200
Subject: sqlite3 backend for idb-bridge / wallet-core

---
 packages/idb-bridge/package.json | 22 +-
 packages/idb-bridge/src/MemoryBackend.test.ts | 602 +----
 packages/idb-bridge/src/MemoryBackend.ts | 367 ++--
 packages/idb-bridge/src/SqliteBackend.test.ts | 83 +
 packages/idb-bridge/src/SqliteBackend.ts | 2301 ++++++++++++++++++++
 packages/idb-bridge/src/backend-common.ts | 29 +
 packages/idb-bridge/src/backend-interface.ts | 142 +-
 packages/idb-bridge/src/backends.test.ts | 740 +++++++
 packages/idb-bridge/src/bridge-idb.ts | 516 +++--
 .../abort-in-initial-upgradeneeded.test.ts | 4 +-
 .../idb-wpt-ported/close-in-upgradeneeded.test.ts | 4 +-
 .../src/idb-wpt-ported/cursor-overloads.test.ts | 4 +-
 .../event-dispatch-active-flag.test.ts | 3 +
 .../idb-wpt-ported/idbcursor-advance-index.test.ts | 4 +-
 .../idbcursor-continue-index.test.ts | 7 +-
 .../idbcursor-continue-objectstore.test.ts | 4 +-
 .../idbcursor-delete-exception-order.test.ts | 4 +-
 .../idb-wpt-ported/idbcursor-delete-index.test.ts | 4 +-
 .../idbcursor-delete-objectstore.test.ts | 4 +-
 .../src/idb-wpt-ported/idbcursor-reused.test.ts | 4 +-
 .../idb-wpt-ported/idbcursor-update-index.test.ts | 3 +
 .../src/idb-wpt-ported/idbfactory-cmp.test.ts | 6 +-
 .../src/idb-wpt-ported/idbfactory-open.test.ts | 27 +-
 .../src/idb-wpt-ported/idbindex-get.test.ts | 6 +-
 .../src/idb-wpt-ported/idbindex-openCursor.test.ts | 4 +-
 .../idbobjectstore-add-put-exception-order.test.ts | 6 +-
 .../src/idb-wpt-ported/idbobjectstore-add.test.ts | 4 +-
 .../src/idb-wpt-ported/idbobjectstore-get.test.ts | 4 +-
 .../src/idb-wpt-ported/idbobjectstore-put.test.ts | 4 +-
 .../idbobjectstore-rename-store.test.ts | 3 +
 .../idbtransaction-oncomplete.test.ts | 4 +-
 .../idb-bridge/src/idb-wpt-ported/keypath.test.ts | 6 +-
 .../request-bubble-and-capture.test.ts | 4 +-
 .../transaction-requestqueue.test.ts | 4 +-
 .../idb-bridge/src/idb-wpt-ported/value.test.ts | 8 +-
 .../idb-bridge/src/idb-wpt-ported/wptsupport.ts | 27 +-
 packages/idb-bridge/src/idbpromutil.ts | 26 +
 packages/idb-bridge/src/idbtypes.ts | 23 -
 packages/idb-bridge/src/index.ts | 11 +-
 packages/idb-bridge/src/node-sqlite3-impl.ts | 84 +
 packages/idb-bridge/src/sqlite3-interface.ts | 34 +
 packages/idb-bridge/src/testingdb.ts | 43 +
 packages/idb-bridge/src/util/FakeDomEvent.ts | 103 +
 packages/idb-bridge/src/util/FakeEventTarget.ts | 2 +-
 packages/idb-bridge/src/util/extractKey.ts | 4 +
 packages/idb-bridge/src/util/key-storage.test.ts | 39 +
 packages/idb-bridge/src/util/key-storage.ts | 363 +++
 .../idb-bridge/src/util/makeStoreKeyValue.test.ts | 66 +-
 packages/idb-bridge/src/util/makeStoreKeyValue.ts | 20 +-
 packages/idb-bridge/src/util/queueTask.ts | 5 +
 .../idb-bridge/src/util/structuredClone.test.ts | 61 +-
 packages/idb-bridge/src/util/structuredClone.ts | 231 +-
 packages/idb-bridge/src/util/valueToKey.ts | 6 +-
 packages/idb-bridge/tsconfig.json | 2 +-
 54 files changed, 4801 insertions(+), 1290 deletions(-)
 create mode 100644 packages/idb-bridge/src/SqliteBackend.test.ts
 create mode 100644 packages/idb-bridge/src/SqliteBackend.ts
 create mode 100644 packages/idb-bridge/src/backend-common.ts
 create mode 100644 packages/idb-bridge/src/backends.test.ts
 create mode 100644 packages/idb-bridge/src/idbpromutil.ts
 create mode 100644 packages/idb-bridge/src/node-sqlite3-impl.ts
 create mode 100644 packages/idb-bridge/src/sqlite3-interface.ts
 create mode 100644 packages/idb-bridge/src/testingdb.ts
 create
mode 100644 packages/idb-bridge/src/util/FakeDomEvent.ts create mode 100644 packages/idb-bridge/src/util/key-storage.test.ts create mode 100644 packages/idb-bridge/src/util/key-storage.ts (limited to 'packages/idb-bridge') diff --git a/packages/idb-bridge/package.json b/packages/idb-bridge/package.json index 88ff8a1c2..2677c302f 100644 --- a/packages/idb-bridge/package.json +++ b/packages/idb-bridge/package.json @@ -18,22 +18,26 @@ "exports": { ".": { "default": "./lib/index.js" + }, + "./node-sqlite3-bindings": { + "default": "./lib/node-sqlite3-impl.js" } }, "devDependencies": { - "@types/node": "^18.11.17", - "ava": "^4.3.3", - "esm": "^3.2.25", + "@types/better-sqlite3": "^7.6.4", + "@types/node": "^20.4.1", + "ava": "^5.3.1", "prettier": "^2.8.8", - "rimraf": "^3.0.2", - "typescript": "^5.1.3" + "rimraf": "^5.0.1", + "typescript": "^5.1.6" }, "dependencies": { - "tslib": "^2.5.3" + "tslib": "^2.6.0" }, "ava": { - "require": [ - "esm" - ] + "failFast": true + }, + "optionalDependencies": { + "better-sqlite3": "^8.4.0" } } diff --git a/packages/idb-bridge/src/MemoryBackend.test.ts b/packages/idb-bridge/src/MemoryBackend.test.ts index 8a544a201..a851309ed 100644 --- a/packages/idb-bridge/src/MemoryBackend.test.ts +++ b/packages/idb-bridge/src/MemoryBackend.test.ts @@ -15,334 +15,9 @@ */ import test from "ava"; -import { - BridgeIDBCursorWithValue, - BridgeIDBDatabase, - BridgeIDBFactory, - BridgeIDBKeyRange, - BridgeIDBRequest, - BridgeIDBTransaction, -} from "./bridge-idb.js"; -import { - IDBCursorDirection, - IDBCursorWithValue, - IDBDatabase, - IDBKeyRange, - IDBValidKey, -} from "./idbtypes.js"; import { MemoryBackend } from "./MemoryBackend.js"; - -function promiseFromRequest(request: BridgeIDBRequest): Promise { - return new Promise((resolve, reject) => { - request.onsuccess = () => { - resolve(request.result); - }; - request.onerror = () => { - reject(request.error); - }; - }); -} - -function promiseFromTransaction( - transaction: BridgeIDBTransaction, -): Promise { - return new Promise((resolve, reject) => { - transaction.oncomplete = () => { - resolve(); - }; - transaction.onerror = () => { - reject(); - }; - }); -} - -test("Spec: Example 1 Part 1", async (t) => { - const backend = new MemoryBackend(); - const idb = new BridgeIDBFactory(backend); - - const request = idb.open("library"); - request.onupgradeneeded = () => { - const db = request.result; - const store = db.createObjectStore("books", { keyPath: "isbn" }); - const titleIndex = store.createIndex("by_title", "title", { unique: true }); - const authorIndex = store.createIndex("by_author", "author"); - - // Populate with initial data. 
- store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); - store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); - store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); - }; - - await promiseFromRequest(request); - t.pass(); -}); - -test("Spec: Example 1 Part 2", async (t) => { - const backend = new MemoryBackend(); - const idb = new BridgeIDBFactory(backend); - - const request = idb.open("library"); - request.onupgradeneeded = () => { - const db = request.result; - const store = db.createObjectStore("books", { keyPath: "isbn" }); - const titleIndex = store.createIndex("by_title", "title", { unique: true }); - const authorIndex = store.createIndex("by_author", "author"); - }; - - const db: BridgeIDBDatabase = await promiseFromRequest(request); - - t.is(db.name, "library"); - - const tx = db.transaction("books", "readwrite"); - tx.oncomplete = () => { - console.log("oncomplete called"); - }; - - const store = tx.objectStore("books"); - - store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); - store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); - store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); - - await promiseFromTransaction(tx); - - t.pass(); -}); - -test("Spec: Example 1 Part 3", async (t) => { - const backend = new MemoryBackend(); - backend.enableTracing = true; - const idb = new BridgeIDBFactory(backend); - - const request = idb.open("library"); - request.onupgradeneeded = () => { - const db = request.result; - const store = db.createObjectStore("books", { keyPath: "isbn" }); - const titleIndex = store.createIndex("by_title", "title", { unique: true }); - const authorIndex = store.createIndex("by_author", "author"); - }; - - const db: BridgeIDBDatabase = await promiseFromRequest(request); - - t.is(db.name, "library"); - - const tx = db.transaction("books", "readwrite"); - - const store = tx.objectStore("books"); - - store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); - store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); - store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); - - await promiseFromTransaction(tx); - - const tx2 = db.transaction("books", "readonly"); - const store2 = tx2.objectStore("books"); - var index2 = store2.index("by_title"); - const request2 = index2.get("Bedrock Nights"); - const result2: any = await promiseFromRequest(request2); - - t.is(result2.author, "Barney"); - - const tx3 = db.transaction(["books"], "readonly"); - const store3 = tx3.objectStore("books"); - const index3 = store3.index("by_author"); - const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred")); - - await promiseFromRequest(request3); - - let cursor: BridgeIDBCursorWithValue | null; - cursor = request3.result as BridgeIDBCursorWithValue; - t.is(cursor.value.author, "Fred"); - t.is(cursor.value.isbn, 123456); - - cursor.continue(); - - await promiseFromRequest(request3); - - cursor = request3.result as BridgeIDBCursorWithValue; - t.is(cursor.value.author, "Fred"); - t.is(cursor.value.isbn, 234567); - - await promiseFromTransaction(tx3); - - const tx4 = db.transaction("books", "readonly"); - const store4 = tx4.objectStore("books"); - const request4 = store4.openCursor(); - - await promiseFromRequest(request4); - - cursor = request4.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.isbn, 123456); - - cursor.continue(); - - await promiseFromRequest(request4); - - cursor = request4.result; - if (!cursor) { 
- throw new Error(); - } - t.is(cursor.value.isbn, 234567); - - cursor.continue(); - - await promiseFromRequest(request4); - - cursor = request4.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.isbn, 345678); - - cursor.continue(); - await promiseFromRequest(request4); - - cursor = request4.result; - - t.is(cursor, null); - - const tx5 = db.transaction("books", "readonly"); - const store5 = tx5.objectStore("books"); - const index5 = store5.index("by_author"); - - const request5 = index5.openCursor(null, "next"); - - await promiseFromRequest(request5); - cursor = request5.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.author, "Barney"); - cursor.continue(); - - await promiseFromRequest(request5); - cursor = request5.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.author, "Fred"); - cursor.continue(); - - await promiseFromRequest(request5); - cursor = request5.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.author, "Fred"); - cursor.continue(); - - await promiseFromRequest(request5); - cursor = request5.result; - t.is(cursor, null); - - const request6 = index5.openCursor(null, "nextunique"); - - await promiseFromRequest(request6); - cursor = request6.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.author, "Barney"); - cursor.continue(); - - await promiseFromRequest(request6); - cursor = request6.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.author, "Fred"); - t.is(cursor.value.isbn, 123456); - cursor.continue(); - - await promiseFromRequest(request6); - cursor = request6.result; - t.is(cursor, null); - - const request7 = index5.openCursor(null, "prevunique"); - await promiseFromRequest(request7); - cursor = request7.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.author, "Fred"); - t.is(cursor.value.isbn, 123456); - cursor.continue(); - - await promiseFromRequest(request7); - cursor = request7.result; - if (!cursor) { - throw new Error(); - } - t.is(cursor.value.author, "Barney"); - cursor.continue(); - - await promiseFromRequest(request7); - cursor = request7.result; - t.is(cursor, null); - - db.close(); - - t.pass(); -}); - -test("simple deletion", async (t) => { - const backend = new MemoryBackend(); - const idb = new BridgeIDBFactory(backend); - - const request = idb.open("library"); - request.onupgradeneeded = () => { - const db = request.result; - const store = db.createObjectStore("books", { keyPath: "isbn" }); - const titleIndex = store.createIndex("by_title", "title", { unique: true }); - const authorIndex = store.createIndex("by_author", "author"); - }; - - const db: BridgeIDBDatabase = await promiseFromRequest(request); - - t.is(db.name, "library"); - - const tx = db.transaction("books", "readwrite"); - tx.oncomplete = () => { - console.log("oncomplete called"); - }; - - const store = tx.objectStore("books"); - - store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); - store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); - store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); - - await promiseFromTransaction(tx); - - const tx2 = db.transaction("books", "readwrite"); - - const store2 = tx2.objectStore("books"); - - const req1 = store2.get(234567); - await promiseFromRequest(req1); - t.is(req1.readyState, "done"); - t.is(req1.result.author, "Fred"); - - store2.delete(123456); - - const req2 = store2.get(123456); - await promiseFromRequest(req2); - t.is(req2.readyState, "done"); 
- t.is(req2.result, undefined); - - const req3 = store2.get(234567); - await promiseFromRequest(req3); - t.is(req3.readyState, "done"); - t.is(req3.result.author, "Fred"); - - await promiseFromTransaction(tx2); - - t.pass(); -}); +import { BridgeIDBDatabase, BridgeIDBFactory } from "./bridge-idb.js"; +import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js"; test("export", async (t) => { const backend = new MemoryBackend(); @@ -386,276 +61,3 @@ test("export", async (t) => { t.is(exportedData2.databases["library"].schema.databaseVersion, 42); t.pass(); }); - -test("update with non-existent index values", async (t) => { - const backend = new MemoryBackend(); - backend.enableTracing = true; - const idb = new BridgeIDBFactory(backend); - const request = idb.open("mydb"); - request.onupgradeneeded = () => { - const db = request.result; - const store = db.createObjectStore("bla", { keyPath: "x" }); - store.createIndex("by_y", "y"); - store.createIndex("by_z", "z"); - }; - - const db: BridgeIDBDatabase = await promiseFromRequest(request); - - t.is(db.name, "mydb"); - - { - const tx = db.transaction("bla", "readwrite"); - const store = tx.objectStore("bla"); - store.put({ x: 0, y: "a", z: 42 }); - const index = store.index("by_z"); - const indRes = await promiseFromRequest(index.get(42)); - t.is(indRes.x, 0); - const res = await promiseFromRequest(store.get(0)); - t.is(res.z, 42); - await promiseFromTransaction(tx); - } - - { - const tx = db.transaction("bla", "readwrite"); - const store = tx.objectStore("bla"); - store.put({ x: 0, y: "a" }); - const res = await promiseFromRequest(store.get(0)); - t.is(res.z, undefined); - await promiseFromTransaction(tx); - } - - { - const tx = db.transaction("bla", "readwrite"); - const store = tx.objectStore("bla"); - const index = store.index("by_z"); - { - const indRes = await promiseFromRequest(index.get(42)); - t.is(indRes, undefined); - } - const res = await promiseFromRequest(store.get(0)); - t.is(res.z, undefined); - await promiseFromTransaction(tx); - } - - t.pass(); -}); - -test("delete from unique index", async (t) => { - const backend = new MemoryBackend(); - backend.enableTracing = true; - const idb = new BridgeIDBFactory(backend); - const request = idb.open("mydb"); - request.onupgradeneeded = () => { - const db = request.result as IDBDatabase; - const store = db.createObjectStore("bla", { keyPath: "x" }); - store.createIndex("by_yz", ["y", "z"], { - unique: true, - }); - }; - - const db: BridgeIDBDatabase = await promiseFromRequest(request); - - t.is(db.name, "mydb"); - - { - const tx = db.transaction("bla", "readwrite"); - const store = tx.objectStore("bla"); - store.put({ x: 0, y: "a", z: 42 }); - const index = store.index("by_yz"); - const indRes = await promiseFromRequest(index.get(["a", 42])); - t.is(indRes.x, 0); - const res = await promiseFromRequest(store.get(0)); - t.is(res.z, 42); - await promiseFromTransaction(tx); - } - - { - const tx = db.transaction("bla", "readwrite"); - const store = tx.objectStore("bla"); - store.put({ x: 0, y: "a", z: 42, extra: 123 }); - await promiseFromTransaction(tx); - } - - t.pass(); -}); - -test("range queries", async (t) => { - const backend = new MemoryBackend(); - backend.enableTracing = true; - const idb = new BridgeIDBFactory(backend); - - const request = idb.open("mydb"); - request.onupgradeneeded = () => { - const db = request.result; - const store = db.createObjectStore("bla", { keyPath: "x" }); - store.createIndex("by_y", "y"); - store.createIndex("by_z", "z"); - }; - - const 
db: BridgeIDBDatabase = await promiseFromRequest(request); - - t.is(db.name, "mydb"); - - const tx = db.transaction("bla", "readwrite"); - - const store = tx.objectStore("bla"); - - store.put({ x: 0, y: "a" }); - store.put({ x: 2, y: "a" }); - store.put({ x: 4, y: "b" }); - store.put({ x: 8, y: "b" }); - store.put({ x: 10, y: "c" }); - store.put({ x: 12, y: "c" }); - - await promiseFromTransaction(tx); - - async function doCursorStoreQuery( - range: IDBKeyRange | IDBValidKey | undefined, - direction: IDBCursorDirection | undefined, - expected: any[], - ): Promise { - const tx = db.transaction("bla", "readwrite"); - const store = tx.objectStore("bla"); - const vals: any[] = []; - - const req = store.openCursor(range, direction); - while (1) { - await promiseFromRequest(req); - const cursor: IDBCursorWithValue = req.result; - if (!cursor) { - break; - } - cursor.continue(); - vals.push(cursor.value); - } - - await promiseFromTransaction(tx); - - t.deepEqual(vals, expected); - } - - async function doCursorIndexQuery( - range: IDBKeyRange | IDBValidKey | undefined, - direction: IDBCursorDirection | undefined, - expected: any[], - ): Promise { - const tx = db.transaction("bla", "readwrite"); - const store = tx.objectStore("bla"); - const index = store.index("by_y"); - const vals: any[] = []; - - const req = index.openCursor(range, direction); - while (1) { - await promiseFromRequest(req); - const cursor: IDBCursorWithValue = req.result; - if (!cursor) { - break; - } - cursor.continue(); - vals.push(cursor.value); - } - - await promiseFromTransaction(tx); - - t.deepEqual(vals, expected); - } - - await doCursorStoreQuery(undefined, undefined, [ - { - x: 0, - y: "a", - }, - { - x: 2, - y: "a", - }, - { - x: 4, - y: "b", - }, - { - x: 8, - y: "b", - }, - { - x: 10, - y: "c", - }, - { - x: 12, - y: "c", - }, - ]); - - await doCursorStoreQuery( - BridgeIDBKeyRange.bound(0, 12, true, true), - undefined, - [ - { - x: 2, - y: "a", - }, - { - x: 4, - y: "b", - }, - { - x: 8, - y: "b", - }, - { - x: 10, - y: "c", - }, - ], - ); - - await doCursorIndexQuery( - BridgeIDBKeyRange.bound("a", "c", true, true), - undefined, - [ - { - x: 4, - y: "b", - }, - { - x: 8, - y: "b", - }, - ], - ); - - await doCursorIndexQuery(undefined, "nextunique", [ - { - x: 0, - y: "a", - }, - { - x: 4, - y: "b", - }, - { - x: 10, - y: "c", - }, - ]); - - await doCursorIndexQuery(undefined, "prevunique", [ - { - x: 10, - y: "c", - }, - { - x: 4, - y: "b", - }, - { - x: 0, - y: "a", - }, - ]); - - db.close(); - - t.pass(); -}); diff --git a/packages/idb-bridge/src/MemoryBackend.ts b/packages/idb-bridge/src/MemoryBackend.ts index f40f1c98b..526920a9f 100644 --- a/packages/idb-bridge/src/MemoryBackend.ts +++ b/packages/idb-bridge/src/MemoryBackend.ts @@ -14,43 +14,38 @@ permissions and limitations under the License. 
*/ +import { AsyncCondition, TransactionLevel } from "./backend-common.js"; import { Backend, + ConnectResult, DatabaseConnection, DatabaseTransaction, - Schema, - RecordStoreRequest, - IndexProperties, - RecordGetRequest, + IndexGetQuery, + IndexMeta, + ObjectStoreGetQuery, + ObjectStoreMeta, RecordGetResponse, + RecordStoreRequest, + RecordStoreResponse, ResultLevel, StoreLevel, - RecordStoreResponse, } from "./backend-interface.js"; +import { BridgeIDBKeyRange } from "./bridge-idb.js"; +import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js"; +import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js"; +import { compareKeys } from "./util/cmp.js"; +import { ConstraintError, DataError } from "./util/errors.js"; +import { getIndexKeys } from "./util/getIndexKeys.js"; +import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js"; import { structuredClone, structuredEncapsulate, structuredRevive, } from "./util/structuredClone.js"; -import { ConstraintError, DataError } from "./util/errors.js"; -import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js"; -import { compareKeys } from "./util/cmp.js"; -import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js"; -import { getIndexKeys } from "./util/getIndexKeys.js"; -import { openPromise } from "./util/openPromise.js"; -import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js"; -import { BridgeIDBKeyRange } from "./bridge-idb.js"; type Key = IDBValidKey; type Value = unknown; -enum TransactionLevel { - None = 0, - Read = 1, - Write = 2, - VersionChange = 3, -} - interface ObjectStore { originalName: string; modifiedName: string | undefined; @@ -95,24 +90,39 @@ interface Database { connectionCookies: string[]; } -/** @public */ export interface ObjectStoreDump { name: string; keyGenerator: number; records: ObjectStoreRecord[]; } -/** @public */ export interface DatabaseDump { schema: Schema; objectStores: { [name: string]: ObjectStoreDump }; } -/** @public */ export interface MemoryBackendDump { databases: { [name: string]: DatabaseDump }; } +export interface ObjectStoreProperties { + keyPath: string | string[] | null; + autoIncrement: boolean; + indexes: { [nameame: string]: IndexProperties }; +} + +export interface IndexProperties { + keyPath: string | string[]; + multiEntry: boolean; + unique: boolean; +} + +export interface Schema { + databaseName: string; + databaseVersion: number; + objectStores: { [name: string]: ObjectStoreProperties }; +} + interface ObjectStoreMapEntry { store: ObjectStore; indexMap: { [currentName: string]: Index }; @@ -142,27 +152,6 @@ export interface ObjectStoreRecord { value: Value; } -class AsyncCondition { - _waitPromise: Promise; - _resolveWaitPromise: () => void; - constructor() { - const op = openPromise(); - this._waitPromise = op.promise; - this._resolveWaitPromise = op.resolve; - } - - wait(): Promise { - return this._waitPromise; - } - - trigger(): void { - this._resolveWaitPromise(); - const op = openPromise(); - this._waitPromise = op.promise; - this._resolveWaitPromise = op.resolve; - } -} - function nextStoreKey( forward: boolean, data: ISortedMapF, @@ -178,12 +167,6 @@ function nextStoreKey( return res[1].primaryKey; } -function assertInvariant(cond: boolean): asserts cond { - if (!cond) { - throw Error("invariant failed"); - } -} - function nextKey( forward: boolean, tree: ISortedSetF, @@ -230,6 +213,7 @@ function furthestKey( } export interface AccessStats { + primitiveStatements: number; 
writeTransactions: number; readTransactions: number; writesPerStore: Record; @@ -279,6 +263,7 @@ export class MemoryBackend implements Backend { trackStats: boolean = true; accessStats: AccessStats = { + primitiveStatements: 0, readTransactions: 0, writeTransactions: 0, readsPerStore: {}, @@ -459,7 +444,7 @@ export class MemoryBackend implements Backend { delete this.databases[name]; } - async connectDatabase(name: string): Promise { + async connectDatabase(name: string): Promise { if (this.enableTracing) { console.log(`TRACING: connectDatabase(${name})`); } @@ -498,7 +483,11 @@ export class MemoryBackend implements Backend { this.connections[connectionCookie] = myConn; - return { connectionCookie }; + return { + conn: { connectionCookie }, + version: database.committedSchema.databaseVersion, + objectStores: Object.keys(database.committedSchema.objectStores).sort(), + }; } async beginTransaction( @@ -601,14 +590,6 @@ export class MemoryBackend implements Backend { this.disconnectCond.trigger(); } - private requireConnection(dbConn: DatabaseConnection): Connection { - const myConn = this.connections[dbConn.connectionCookie]; - if (!myConn) { - throw Error(`unknown connection (${dbConn.connectionCookie})`); - } - return myConn; - } - private requireConnectionFromTransaction( btx: DatabaseTransaction, ): Connection { @@ -619,36 +600,6 @@ export class MemoryBackend implements Backend { return myConn; } - getSchema(dbConn: DatabaseConnection): Schema { - if (this.enableTracing) { - console.log(`TRACING: getSchema`); - } - const myConn = this.requireConnection(dbConn); - const db = this.databases[myConn.dbName]; - if (!db) { - throw Error("db not found"); - } - return db.committedSchema; - } - - getCurrentTransactionSchema(btx: DatabaseTransaction): Schema { - const myConn = this.requireConnectionFromTransaction(btx); - const db = this.databases[myConn.dbName]; - if (!db) { - throw Error("db not found"); - } - return myConn.modifiedSchema; - } - - getInitialTransactionSchema(btx: DatabaseTransaction): Schema { - const myConn = this.requireConnectionFromTransaction(btx); - const db = this.databases[myConn.dbName]; - if (!db) { - throw Error("db not found"); - } - return db.committedSchema; - } - renameIndex( btx: DatabaseTransaction, objectStoreName: string, @@ -799,7 +750,7 @@ export class MemoryBackend implements Backend { createObjectStore( btx: DatabaseTransaction, name: string, - keyPath: string[] | null, + keyPath: string | string[] | null, autoIncrement: boolean, ): void { if (this.enableTracing) { @@ -842,7 +793,7 @@ export class MemoryBackend implements Backend { btx: DatabaseTransaction, indexName: string, objectStoreName: string, - keyPath: string[], + keyPath: string | string[], multiEntry: boolean, unique: boolean, ): void { @@ -1102,12 +1053,91 @@ export class MemoryBackend implements Backend { } } - async getRecords( + async getObjectStoreRecords( + btx: DatabaseTransaction, + req: ObjectStoreGetQuery, + ): Promise { + if (this.enableTracing) { + console.log(`TRACING: getObjectStoreRecords`); + console.log("query", req); + } + const myConn = this.requireConnectionFromTransaction(btx); + const db = this.databases[myConn.dbName]; + if (!db) { + throw Error("db not found"); + } + if (db.txLevel < TransactionLevel.Read) { + throw Error("only allowed while running a transaction"); + } + if ( + db.txRestrictObjectStores && + !db.txRestrictObjectStores.includes(req.objectStoreName) + ) { + throw Error( + `Not allowed to access store '${ + req.objectStoreName + }', transaction is over 
${JSON.stringify(db.txRestrictObjectStores)}`, + ); + } + const objectStoreMapEntry = myConn.objectStoreMap[req.objectStoreName]; + if (!objectStoreMapEntry) { + throw Error("object store not found"); + } + + let range; + if (req.range == null) { + range = new BridgeIDBKeyRange(undefined, undefined, true, true); + } else { + range = req.range; + } + + if (typeof range !== "object") { + throw Error( + "getObjectStoreRecords was given an invalid range (sanity check failed, not an object)", + ); + } + + if (!("lowerOpen" in range)) { + throw Error( + "getObjectStoreRecords was given an invalid range (sanity check failed, lowerOpen missing)", + ); + } + + const forward: boolean = + req.direction === "next" || req.direction === "nextunique"; + + const storeData = + objectStoreMapEntry.store.modifiedData || + objectStoreMapEntry.store.originalData; + + const resp = getObjectStoreRecords({ + forward, + storeData, + limit: req.limit, + range, + resultLevel: req.resultLevel, + advancePrimaryKey: req.advancePrimaryKey, + lastObjectStorePosition: req.lastObjectStorePosition, + }); + if (this.trackStats) { + const k = `${req.objectStoreName}`; + this.accessStats.readsPerStore[k] = + (this.accessStats.readsPerStore[k] ?? 0) + 1; + this.accessStats.readItemsPerStore[k] = + (this.accessStats.readItemsPerStore[k] ?? 0) + resp.count; + } + if (this.enableTracing) { + console.log(`TRACING: getRecords got ${resp.count} results`); + } + return resp; + } + + async getIndexRecords( btx: DatabaseTransaction, - req: RecordGetRequest, + req: IndexGetQuery, ): Promise { if (this.enableTracing) { - console.log(`TRACING: getRecords`); + console.log(`TRACING: getIndexRecords`); console.log("query", req); } const myConn = this.requireConnectionFromTransaction(btx); @@ -1161,58 +1191,31 @@ export class MemoryBackend implements Backend { objectStoreMapEntry.store.modifiedData || objectStoreMapEntry.store.originalData; - const haveIndex = req.indexName !== undefined; - - let resp: RecordGetResponse; - - if (haveIndex) { - const index = - myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!]; - const indexData = index.modifiedData || index.originalData; - resp = getIndexRecords({ - forward, - indexData, - storeData, - limit: req.limit, - unique, - range, - resultLevel: req.resultLevel, - advanceIndexKey: req.advanceIndexKey, - advancePrimaryKey: req.advancePrimaryKey, - lastIndexPosition: req.lastIndexPosition, - lastObjectStorePosition: req.lastObjectStorePosition, - }); - if (this.trackStats) { - const k = `${req.objectStoreName}.${req.indexName}`; - this.accessStats.readsPerIndex[k] = - (this.accessStats.readsPerIndex[k] ?? 0) + 1; - this.accessStats.readItemsPerIndex[k] = - (this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count; - } - } else { - if (req.advanceIndexKey !== undefined) { - throw Error("unsupported request"); - } - resp = getObjectStoreRecords({ - forward, - storeData, - limit: req.limit, - range, - resultLevel: req.resultLevel, - advancePrimaryKey: req.advancePrimaryKey, - lastIndexPosition: req.lastIndexPosition, - lastObjectStorePosition: req.lastObjectStorePosition, - }); - if (this.trackStats) { - const k = `${req.objectStoreName}`; - this.accessStats.readsPerStore[k] = - (this.accessStats.readsPerStore[k] ?? 0) + 1; - this.accessStats.readItemsPerStore[k] = - (this.accessStats.readItemsPerStore[k] ?? 
0) + resp.count; - } + const index = + myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!]; + const indexData = index.modifiedData || index.originalData; + const resp = getIndexRecords({ + forward, + indexData, + storeData, + limit: req.limit, + unique, + range, + resultLevel: req.resultLevel, + advanceIndexKey: req.advanceIndexKey, + advancePrimaryKey: req.advancePrimaryKey, + lastIndexPosition: req.lastIndexPosition, + lastObjectStorePosition: req.lastObjectStorePosition, + }); + if (this.trackStats) { + const k = `${req.objectStoreName}.${req.indexName}`; + this.accessStats.readsPerIndex[k] = + (this.accessStats.readsPerIndex[k] ?? 0) + 1; + this.accessStats.readItemsPerIndex[k] = + (this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count; } if (this.enableTracing) { - console.log(`TRACING: getRecords got ${resp.count} results`); + console.log(`TRACING: getIndexRecords got ${resp.count} results`); } return resp; } @@ -1294,13 +1297,13 @@ export class MemoryBackend implements Backend { let storeKeyResult: StoreKeyResult; try { - storeKeyResult = makeStoreKeyValue( - storeReq.value, - storeReq.key, - keygen, - autoIncrement, - keyPath, - ); + storeKeyResult = makeStoreKeyValue({ + value: storeReq.value, + key: storeReq.key, + currentKeyGenerator: keygen, + autoIncrement: autoIncrement, + keyPath: keyPath, + }); } catch (e) { if (e instanceof DataError) { const kp = JSON.stringify(keyPath); @@ -1445,7 +1448,7 @@ export class MemoryBackend implements Backend { } } - async rollback(btx: DatabaseTransaction): Promise { + rollback(btx: DatabaseTransaction): void { if (this.enableTracing) { console.log(`TRACING: rollback`); } @@ -1536,6 +1539,57 @@ export class MemoryBackend implements Backend { await this.afterCommitCallback(); } } + + getObjectStoreMeta( + dbConn: DatabaseConnection, + objectStoreName: string, + ): ObjectStoreMeta | undefined { + const conn = this.connections[dbConn.connectionCookie]; + if (!conn) { + throw Error("db connection not found"); + } + let schema = conn.modifiedSchema; + if (!schema) { + throw Error(); + } + const storeInfo = schema.objectStores[objectStoreName]; + if (!storeInfo) { + return undefined; + } + return { + autoIncrement: storeInfo.autoIncrement, + indexSet: Object.keys(storeInfo.indexes).sort(), + keyPath: structuredClone(storeInfo.keyPath), + }; + } + + getIndexMeta( + dbConn: DatabaseConnection, + objectStoreName: string, + indexName: string, + ): IndexMeta | undefined { + const conn = this.connections[dbConn.connectionCookie]; + if (!conn) { + throw Error("db connection not found"); + } + let schema = conn.modifiedSchema; + if (!schema) { + throw Error(); + } + const storeInfo = schema.objectStores[objectStoreName]; + if (!storeInfo) { + return undefined; + } + const indexInfo = storeInfo.indexes[indexName]; + if (!indexInfo) { + return; + } + return { + keyPath: structuredClone(indexInfo.keyPath), + multiEntry: indexInfo.multiEntry, + unique: indexInfo.unique, + }; + } } function getIndexRecords(req: { @@ -1734,7 +1788,6 @@ function getIndexRecords(req: { function getObjectStoreRecords(req: { storeData: ISortedMapF; - lastIndexPosition?: IDBValidKey; forward: boolean; range: IDBKeyRange; lastObjectStorePosition?: IDBValidKey; @@ -1743,7 +1796,6 @@ function getObjectStoreRecords(req: { resultLevel: ResultLevel; }): RecordGetResponse { let numResults = 0; - const indexKeys: Key[] = []; const primaryKeys: Key[] = []; const values: Value[] = []; const { storeData, range, forward } = req; @@ -1751,8 +1803,7 @@ function 
getObjectStoreRecords(req: { function packResult(): RecordGetResponse { return { count: numResults, - indexKeys: - req.resultLevel >= ResultLevel.OnlyKeys ? indexKeys : undefined, + indexKeys: undefined, primaryKeys: req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined, values: req.resultLevel >= ResultLevel.Full ? values : undefined, @@ -1762,8 +1813,8 @@ function getObjectStoreRecords(req: { const rangeStart = forward ? range.lower : range.upper; const dataStart = forward ? storeData.minKey() : storeData.maxKey(); let storePos = req.lastObjectStorePosition; - storePos = furthestKey(forward, storePos, rangeStart); storePos = furthestKey(forward, storePos, dataStart); + storePos = furthestKey(forward, storePos, rangeStart); storePos = furthestKey(forward, storePos, req.advancePrimaryKey); if (storePos != null) { diff --git a/packages/idb-bridge/src/SqliteBackend.test.ts b/packages/idb-bridge/src/SqliteBackend.test.ts new file mode 100644 index 000000000..612cb9d4b --- /dev/null +++ b/packages/idb-bridge/src/SqliteBackend.test.ts @@ -0,0 +1,83 @@ +/* + Copyright 2019 Florian Dold + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + or implied. See the License for the specific language governing + permissions and limitations under the License. + */ + +import test from "ava"; +import { createSqliteBackend } from "./SqliteBackend.js"; +import { ResultLevel, StoreLevel } from "./backend-interface.js"; +import { BridgeIDBKeyRange } from "./bridge-idb.js"; +import * as fs from "node:fs"; +import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js"; + +test("sqlite3 backend", async (t) => { + const filename = "mytestdb.sqlite3"; + try { + fs.unlinkSync(filename); + } catch (e) { + // Do nothing. 
+ } + try { + const sqlite3Impl = await createNodeSqlite3Impl(); + const backend = await createSqliteBackend(sqlite3Impl, { + filename, + }); + const dbConnRes = await backend.connectDatabase("mydb"); + const dbConn = dbConnRes.conn; + const tx = await backend.enterVersionChange(dbConn, 1); + backend.createObjectStore(tx, "books", "isbn", true); + backend.createIndex(tx, "byName", "books", "name", false, false); + await backend.storeRecord(tx, { + objectStoreName: "books", + storeLevel: StoreLevel.AllowOverwrite, + value: { name: "foo" }, + key: undefined, + }); + const res = await backend.getObjectStoreRecords(tx, { + direction: "next", + limit: 1, + objectStoreName: "books", + resultLevel: ResultLevel.Full, + range: BridgeIDBKeyRange.only(1), + }); + t.deepEqual(res.count, 1); + t.deepEqual(res.primaryKeys![0], 1); + t.deepEqual(res.values![0].name, "foo"); + + const indexRes = await backend.getIndexRecords(tx, { + direction: "next", + limit: 1, + objectStoreName: "books", + indexName: "byName", + resultLevel: ResultLevel.Full, + range: BridgeIDBKeyRange.only("foo"), + }); + + t.deepEqual(indexRes.count, 1); + t.deepEqual(indexRes.values![0].isbn, 1); + t.deepEqual(indexRes.values![0].name, "foo"); + + await backend.commit(tx); + + const tx2 = await backend.beginTransaction(dbConn, ["books"], "readwrite"); + await backend.commit(tx2); + + await backend.close(dbConn); + + t.pass(); + } catch (e: any) { + console.log(e); + throw e; + } +}); diff --git a/packages/idb-bridge/src/SqliteBackend.ts b/packages/idb-bridge/src/SqliteBackend.ts new file mode 100644 index 000000000..c40281861 --- /dev/null +++ b/packages/idb-bridge/src/SqliteBackend.ts @@ -0,0 +1,2301 @@ +/* + Copyright 2023 Taler Systems S.A. + + GNU Taler is free software; you can redistribute it and/or modify it under the + terms of the GNU General Public License as published by the Free Software + Foundation; either version 3, or (at your option) any later version. + + GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY + WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR + A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with + GNU Taler; see the file COPYING. If not, see + */ + +/** + * Imports. 
+ */ +import { AsyncCondition } from "./backend-common.js"; +import { + Backend, + ConnectResult, + DatabaseConnection, + DatabaseTransaction, + IndexGetQuery, + IndexMeta, + ObjectStoreGetQuery, + ObjectStoreMeta, + RecordGetResponse, + RecordStoreRequest, + RecordStoreResponse, + ResultLevel, + StoreLevel, +} from "./backend-interface.js"; +import { BridgeIDBDatabaseInfo, BridgeIDBKeyRange } from "./bridge-idb.js"; +import { + IDBKeyPath, + IDBKeyRange, + IDBTransactionMode, + IDBValidKey, +} from "./idbtypes.js"; +import { + AccessStats, + structuredEncapsulate, + structuredRevive, +} from "./index.js"; +import { ConstraintError, DataError } from "./util/errors.js"; +import { getIndexKeys } from "./util/getIndexKeys.js"; +import { deserializeKey, serializeKey } from "./util/key-storage.js"; +import { makeStoreKeyValue } from "./util/makeStoreKeyValue.js"; +import { + Sqlite3Database, + Sqlite3Interface, + Sqlite3Statement, +} from "./sqlite3-interface.js"; + +function assertDbInvariant(b: boolean): asserts b { + if (!b) { + throw Error("internal invariant failed"); + } +} + +const SqliteError = { + constraintPrimarykey: "SQLITE_CONSTRAINT_PRIMARYKEY", +} as const; + +export type SqliteRowid = number | bigint; + +enum TransactionLevel { + None = 0, + Read = 1, + Write = 2, + VersionChange = 3, +} + +interface ConnectionInfo { + // Database that the connection has + // connected to. + databaseName: string; +} + +interface TransactionInfo { + connectionCookie: string; +} + +interface ScopeIndexInfo { + indexId: SqliteRowid; + keyPath: IDBKeyPath | IDBKeyPath[]; + multiEntry: boolean; + unique: boolean; +} + +interface ScopeInfo { + /** + * Internal ID of the object store. + * Used for fast retrieval, since it's the + * primary key / rowid of the sqlite table. 
+ */ + objectStoreId: SqliteRowid; + + indexMap: Map; +} + +interface IndexIterPos { + objectPos: Uint8Array; + indexPos: Uint8Array; +} + +export function serializeKeyPath( + keyPath: string | string[] | null, +): string | null { + if (Array.isArray(keyPath)) { + return "," + keyPath.join(","); + } + return keyPath; +} + +export function deserializeKeyPath( + dbKeyPath: string | null, +): string | string[] | null { + if (dbKeyPath == null) { + return null; + } + if (dbKeyPath[0] === ",") { + const elems = dbKeyPath.split(","); + elems.splice(0, 1); + return elems; + } else { + return dbKeyPath; + } +} + +interface Boundary { + key: Uint8Array; + inclusive: boolean; +} + +function getRangeEndBoundary( + forward: boolean, + range: IDBKeyRange | undefined | null, +): Boundary | undefined { + let endRangeKey: Uint8Array | undefined = undefined; + let endRangeInclusive: boolean = false; + if (range) { + if (forward && range.upper != null) { + endRangeKey = serializeKey(range.upper); + endRangeInclusive = !range.upperOpen; + } else if (!forward && range.lower != null) { + endRangeKey = serializeKey(range.lower); + endRangeInclusive = !range.lowerOpen; + } + } + if (endRangeKey) { + return { + inclusive: endRangeInclusive, + key: endRangeKey, + }; + } + return undefined; +} + +function isOutsideBoundary( + forward: boolean, + endRange: Boundary, + currentKey: Uint8Array, +): boolean { + const cmp = compareSerializedKeys(currentKey, endRange.key); + if (forward && endRange.inclusive && cmp > 0) { + return true; + } else if (forward && !endRange.inclusive && cmp >= 0) { + return true; + } else if (!forward && endRange.inclusive && cmp < 0) { + return true; + } else if (!forward && !endRange.inclusive && cmp <= 0) { + return true; + } + return false; +} + +function compareSerializedKeys(k1: Uint8Array, k2: Uint8Array): number { + // FIXME: Simplify! + let i = 0; + while (1) { + let x1 = i >= k1.length ? -1 : k1[i]; + let x2 = i >= k2.length ? 
-1 : k2[i]; + if (x1 < x2) { + return -1; + } + if (x1 > x2) { + return 1; + } + if (x1 < 0 && x2 < 0) { + return 0; + } + i++; + } + throw Error("not reached"); +} + +export function expectDbNumber( + resultRow: unknown, + name: string, +): number | bigint { + assertDbInvariant(typeof resultRow === "object" && resultRow != null); + const res = (resultRow as any)[name]; + if (typeof res !== "number") { + throw Error("unexpected type from database"); + } + return res; +} + +export function expectDbString(resultRow: unknown, name: string): string { + assertDbInvariant(typeof resultRow === "object" && resultRow != null); + const res = (resultRow as any)[name]; + if (typeof res !== "string") { + throw Error("unexpected type from database"); + } + return res; +} + +export function expectDbStringOrNull( + resultRow: unknown, + name: string, +): string | null { + assertDbInvariant(typeof resultRow === "object" && resultRow != null); + const res = (resultRow as any)[name]; + if (res == null) { + return null; + } + if (typeof res !== "string") { + throw Error("unexpected type from database"); + } + return res; +} + +export class SqliteBackend implements Backend { + private connectionIdCounter = 1; + private transactionIdCounter = 1; + + trackStats = false; + + accessStats: AccessStats = { + primitiveStatements: 0, // Counted by the sqlite impl + readTransactions: 0, + writeTransactions: 0, + readsPerStore: {}, + readsPerIndex: {}, + readItemsPerIndex: {}, + readItemsPerStore: {}, + writesPerStore: {}, + }; + + /** + * Condition that is triggered whenever a transaction finishes. + */ + private transactionDoneCond: AsyncCondition = new AsyncCondition(); + + /** + * Is the connection blocked because either an open request + * or delete request is being processed? 
+ */ + private connectionBlocked: boolean = false; + + private txLevel: TransactionLevel = TransactionLevel.None; + + private txScope: Map = new Map(); + + private connectionMap: Map = new Map(); + + private transactionMap: Map = new Map(); + + private sqlPrepCache: Map = new Map(); + + enableTracing: boolean = true; + + constructor( + public sqliteImpl: Sqlite3Interface, + public db: Sqlite3Database, + ) {} + + private _prep(sql: string): Sqlite3Statement { + const stmt = this.sqlPrepCache.get(sql); + if (stmt) { + return stmt; + } + const newStmt = this.db.prepare(sql); + this.sqlPrepCache.set(sql, newStmt); + return newStmt; + } + + async getIndexRecords( + btx: DatabaseTransaction, + req: IndexGetQuery, + ): Promise { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.txLevel < TransactionLevel.Read) { + throw Error("only allowed in read transaction"); + } + const scopeInfo = this.txScope.get(req.objectStoreName); + if (!scopeInfo) { + throw Error("object store not in scope"); + } + const indexInfo = scopeInfo.indexMap.get(req.indexName); + if (!indexInfo) { + throw Error("index not found"); + } + if (req.advancePrimaryKey != null) { + if (req.advanceIndexKey == null) { + throw Error( + "invalid request (advancePrimaryKey without advanceIndexKey)", + ); + } + } + + if (this.enableTracing) { + console.log( + `querying index os=${req.objectStoreName}, idx=${req.indexName}, direction=${req.direction}`, + ); + } + + const forward: boolean = + req.direction === "next" || req.direction === "nextunique"; + + const queryUnique = + req.direction === "nextunique" || req.direction === "prevunique"; + + const indexId = indexInfo.indexId; + const indexUnique = indexInfo.unique; + + let numResults = 0; + const encPrimaryKeys: Uint8Array[] = []; + const encIndexKeys: Uint8Array[] = []; + const indexKeys: IDBValidKey[] = []; + const primaryKeys: IDBValidKey[] = []; + const values: unknown[] = []; + + const endRange = getRangeEndBoundary(forward, req.range); + + const backendThis = this; + + function packResult() { + if (req.resultLevel > ResultLevel.OnlyCount) { + for (let i = 0; i < encPrimaryKeys.length; i++) { + primaryKeys.push(deserializeKey(encPrimaryKeys[i])); + } + for (let i = 0; i < encIndexKeys.length; i++) { + indexKeys.push(deserializeKey(encIndexKeys[i])); + } + if (req.resultLevel === ResultLevel.Full) { + for (let i = 0; i < encPrimaryKeys.length; i++) { + const val = backendThis._getObjectValue( + scopeInfo!.objectStoreId, + encPrimaryKeys[i], + ); + if (!val) { + throw Error("invariant failed: value not found"); + } + values.push(structuredRevive(JSON.parse(val))); + } + } + } + + if (backendThis.enableTracing) { + console.log(`index query returned ${numResults} results`); + console.log(`result prim keys:`, primaryKeys); + console.log(`result index keys:`, indexKeys); + } + + if (backendThis.trackStats) { + const k = `${req.objectStoreName}.${req.indexName}`; + backendThis.accessStats.readsPerIndex[k] = + (backendThis.accessStats.readsPerIndex[k] ?? 0) + 1; + backendThis.accessStats.readItemsPerIndex[k] = + (backendThis.accessStats.readItemsPerIndex[k] ?? 0) + numResults; + } + + return { + count: numResults, + indexKeys: indexKeys, + primaryKeys: + req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined, + values: req.resultLevel >= ResultLevel.Full ? 
values : undefined, + }; + } + + let currentPos = this._startIndex({ + indexId, + indexUnique, + queryUnique, + forward, + }); + + if (!currentPos) { + return packResult(); + } + + if (this.enableTracing && currentPos) { + console.log(`starting iteration at:`); + console.log(`indexKey:`, deserializeKey(currentPos.indexPos)); + console.log(`objectKey:`, deserializeKey(currentPos.objectPos)); + } + + if (req.advanceIndexKey) { + const advanceIndexKey = serializeKey(req.advanceIndexKey); + const advancePrimaryKey = req.advancePrimaryKey + ? serializeKey(req.advancePrimaryKey) + : undefined; + currentPos = this._continueIndex({ + indexId, + indexUnique, + queryUnique, + inclusive: true, + currentPos, + forward, + targetIndexKey: advanceIndexKey, + targetObjectKey: advancePrimaryKey, + }); + if (!currentPos) { + return packResult(); + } + } + + if (req.lastIndexPosition) { + if (this.enableTracing) { + console.log("index query: seeking past last index position"); + console.log("lastObjectPosition", req.lastObjectStorePosition); + console.log("lastIndexPosition", req.lastIndexPosition); + } + const lastIndexPosition = serializeKey(req.lastIndexPosition); + const lastObjectPosition = req.lastObjectStorePosition + ? serializeKey(req.lastObjectStorePosition) + : undefined; + currentPos = this._continueIndex({ + indexId, + indexUnique, + queryUnique, + inclusive: false, + currentPos, + forward, + targetIndexKey: lastIndexPosition, + targetObjectKey: lastObjectPosition, + }); + if (!currentPos) { + return packResult(); + } + } + + if (this.enableTracing && currentPos) { + console.log( + "before range, current index pos", + deserializeKey(currentPos.indexPos), + ); + console.log( + "... current object pos", + deserializeKey(currentPos.objectPos), + ); + } + + if (req.range != null) { + const targetKeyObj = forward ? req.range.lower : req.range.upper; + if (targetKeyObj != null) { + const targetKey = serializeKey(targetKeyObj); + const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen; + currentPos = this._continueIndex({ + indexId, + indexUnique, + queryUnique, + inclusive, + currentPos, + forward, + targetIndexKey: targetKey, + }); + } + if (!currentPos) { + return packResult(); + } + } + + if (this.enableTracing && currentPos) { + console.log( + "after range, current pos", + deserializeKey(currentPos.indexPos), + ); + console.log( + "after range, current obj pos", + deserializeKey(currentPos.objectPos), + ); + } + + while (1) { + if (req.limit != 0 && numResults == req.limit) { + break; + } + if (currentPos == null) { + break; + } + if ( + endRange && + isOutsideBoundary(forward, endRange, currentPos.indexPos) + ) { + break; + } + + numResults++; + + if (req.resultLevel > ResultLevel.OnlyCount) { + encPrimaryKeys.push(currentPos.objectPos); + encIndexKeys.push(currentPos.indexPos); + } + + currentPos = backendThis._continueIndex({ + indexId, + indexUnique, + forward, + inclusive: false, + currentPos: undefined, + queryUnique, + targetIndexKey: currentPos.indexPos, + targetObjectKey: currentPos.objectPos, + }); + } + + return packResult(); + } + + // Continue past targetIndexKey (and optionally targetObjectKey) + // in the direction specified by "forward". + // Do nothing if the current position is already past the + // target position. 
+ _continueIndex(req: { + indexId: SqliteRowid; + indexUnique: boolean; + queryUnique: boolean; + forward: boolean; + inclusive: boolean; + currentPos: IndexIterPos | null | undefined; + targetIndexKey: Uint8Array; + targetObjectKey?: Uint8Array; + }): IndexIterPos | undefined { + const currentPos = req.currentPos; + const forward = req.forward; + const dir = forward ? 1 : -1; + if (currentPos) { + // Check that the target position after the current position. + // If not, we just stay at the current position. + const indexCmp = compareSerializedKeys( + currentPos.indexPos, + req.targetIndexKey, + ); + if (dir * indexCmp > 0) { + return currentPos; + } + if (indexCmp === 0) { + if (req.targetObjectKey != null) { + const objectCmp = compareSerializedKeys( + currentPos.objectPos, + req.targetObjectKey, + ); + if (req.inclusive && objectCmp === 0) { + return currentPos; + } + if (dir * objectCmp > 0) { + return currentPos; + } + } else if (req.inclusive) { + return currentPos; + } + } + } + + let stmt: Sqlite3Statement; + + if (req.indexUnique) { + if (req.forward) { + if (req.inclusive) { + stmt = this._prep(sqlUniqueIndexDataContinueForwardInclusive); + } else { + stmt = this._prep(sqlUniqueIndexDataContinueForwardStrict); + } + } else { + if (req.inclusive) { + stmt = this._prep(sqlUniqueIndexDataContinueBackwardInclusive); + } else { + stmt = this._prep(sqlUniqueIndexDataContinueBackwardStrict); + } + } + } else { + if (req.forward) { + if (req.queryUnique || req.targetObjectKey == null) { + if (req.inclusive) { + stmt = this._prep(sqlIndexDataContinueForwardInclusiveUnique); + } else { + stmt = this._prep(sqlIndexDataContinueForwardStrictUnique); + } + } else { + if (req.inclusive) { + stmt = this._prep(sqlIndexDataContinueForwardInclusive); + } else { + stmt = this._prep(sqlIndexDataContinueForwardStrict); + } + } + } else { + if (req.queryUnique || req.targetObjectKey == null) { + if (req.inclusive) { + stmt = this._prep(sqlIndexDataContinueBackwardInclusiveUnique); + } else { + stmt = this._prep(sqlIndexDataContinueBackwardStrictUnique); + } + } else { + if (req.inclusive) { + stmt = this._prep(sqlIndexDataContinueBackwardInclusive); + } else { + stmt = this._prep(sqlIndexDataContinueBackwardStrict); + } + } + } + } + + const res = stmt.getFirst({ + index_id: req.indexId, + index_key: req.targetIndexKey, + object_key: req.targetObjectKey, + }); + + if (res == null) { + return undefined; + } + + assertDbInvariant(typeof res === "object"); + assertDbInvariant("index_key" in res); + const indexKey = res.index_key; + if (indexKey == null) { + return undefined; + } + assertDbInvariant(indexKey instanceof Uint8Array); + assertDbInvariant("object_key" in res); + const objectKey = res.object_key; + if (objectKey == null) { + return undefined; + } + assertDbInvariant(objectKey instanceof Uint8Array); + + return { + indexPos: indexKey, + objectPos: objectKey, + }; + } + + _startIndex(req: { + indexId: SqliteRowid; + indexUnique: boolean; + queryUnique: boolean; + forward: boolean; + }): IndexIterPos | undefined { + let stmt: Sqlite3Statement; + if (req.indexUnique) { + if (req.forward) { + stmt = this._prep(sqlUniqueIndexDataStartForward); + } else { + stmt = this._prep(sqlUniqueIndexDataStartBackward); + } + } else { + if (req.forward) { + stmt = this._prep(sqlIndexDataStartForward); + } else { + if (req.queryUnique) { + stmt = this._prep(sqlIndexDataStartBackwardUnique); + } else { + stmt = this._prep(sqlIndexDataStartBackward); + } + } + } + + const res = stmt.getFirst({ + index_id: 
req.indexId, + }); + + if (res == null) { + return undefined; + } + + assertDbInvariant(typeof res === "object"); + assertDbInvariant("index_key" in res); + const indexKey = res.index_key; + assertDbInvariant(indexKey instanceof Uint8Array); + assertDbInvariant("object_key" in res); + const objectKey = res.object_key; + assertDbInvariant(objectKey instanceof Uint8Array); + + return { + indexPos: indexKey, + objectPos: objectKey, + }; + } + + async getObjectStoreRecords( + btx: DatabaseTransaction, + req: ObjectStoreGetQuery, + ): Promise { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.txLevel < TransactionLevel.Read) { + throw Error("only allowed in read transaction"); + } + const scopeInfo = this.txScope.get(req.objectStoreName); + if (!scopeInfo) { + throw Error( + `object store ${JSON.stringify( + req.objectStoreName, + )} not in transaction scope`, + ); + } + + const forward: boolean = + req.direction === "next" || req.direction === "nextunique"; + + let currentKey = this._startObjectKey(scopeInfo.objectStoreId, forward); + + if (req.advancePrimaryKey != null) { + const targetKey = serializeKey(req.advancePrimaryKey); + currentKey = this._continueObjectKey({ + objectStoreId: scopeInfo.objectStoreId, + forward, + inclusive: true, + currentKey, + targetKey, + }); + } + + if (req.lastObjectStorePosition != null) { + const targetKey = serializeKey(req.lastObjectStorePosition); + currentKey = this._continueObjectKey({ + objectStoreId: scopeInfo.objectStoreId, + forward, + inclusive: false, + currentKey, + targetKey, + }); + } + + if (req.range != null) { + const targetKeyObj = forward ? req.range.lower : req.range.upper; + if (targetKeyObj != null) { + const targetKey = serializeKey(targetKeyObj); + const inclusive = forward ? !req.range.lowerOpen : !req.range.upperOpen; + currentKey = this._continueObjectKey({ + objectStoreId: scopeInfo.objectStoreId, + forward, + inclusive, + currentKey, + targetKey, + }); + } + } + + const endRange = getRangeEndBoundary(forward, req.range); + + let numResults = 0; + const encPrimaryKeys: Uint8Array[] = []; + const primaryKeys: IDBValidKey[] = []; + const values: unknown[] = []; + + while (1) { + if (req.limit != 0 && numResults == req.limit) { + break; + } + if (currentKey == null) { + break; + } + if (endRange && isOutsideBoundary(forward, endRange, currentKey)) { + break; + } + + numResults++; + + if (req.resultLevel > ResultLevel.OnlyCount) { + encPrimaryKeys.push(currentKey); + } + + currentKey = this._continueObjectKey({ + objectStoreId: scopeInfo.objectStoreId, + forward, + inclusive: false, + currentKey: null, + targetKey: currentKey, + }); + } + + if (req.resultLevel > ResultLevel.OnlyCount) { + for (let i = 0; i < encPrimaryKeys.length; i++) { + primaryKeys.push(deserializeKey(encPrimaryKeys[i])); + } + if (req.resultLevel === ResultLevel.Full) { + for (let i = 0; i < encPrimaryKeys.length; i++) { + const val = this._getObjectValue( + scopeInfo.objectStoreId, + encPrimaryKeys[i], + ); + if (!val) { + throw Error("invariant failed: value not found"); + } + values.push(structuredRevive(JSON.parse(val))); + } + } + } + + if (this.trackStats) { + const k = `${req.objectStoreName}`; + this.accessStats.readsPerStore[k] = + (this.accessStats.readsPerStore[k] ?? 
0) + 1; + this.accessStats.readItemsPerStore[k] = + (this.accessStats.readItemsPerStore[k] ?? 0) + numResults; + } + + return { + count: numResults, + indexKeys: undefined, + primaryKeys: + req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined, + values: req.resultLevel >= ResultLevel.Full ? values : undefined, + }; + } + + _startObjectKey( + objectStoreId: number | bigint, + forward: boolean, + ): Uint8Array | null { + let stmt: Sqlite3Statement; + if (forward) { + stmt = this._prep(sqlObjectDataStartForward); + } else { + stmt = this._prep(sqlObjectDataStartBackward); + } + const res = stmt.getFirst({ + object_store_id: objectStoreId, + }); + if (!res) { + return null; + } + assertDbInvariant(typeof res === "object"); + assertDbInvariant("rkey" in res); + const rkey = res.rkey; + if (!rkey) { + return null; + } + assertDbInvariant(rkey instanceof Uint8Array); + return rkey; + } + + // Result *must* be past targetKey in the direction + // specified by "forward". + _continueObjectKey(req: { + objectStoreId: number | bigint; + forward: boolean; + currentKey: Uint8Array | null; + targetKey: Uint8Array; + inclusive: boolean; + }): Uint8Array | null { + const { forward, currentKey, targetKey } = req; + const dir = forward ? 1 : -1; + if (currentKey) { + const objCmp = compareSerializedKeys(currentKey, targetKey); + if (objCmp === 0 && req.inclusive) { + return currentKey; + } + if (dir * objCmp > 0) { + return currentKey; + } + } + + let stmt: Sqlite3Statement; + + if (req.inclusive) { + if (req.forward) { + stmt = this._prep(sqlObjectDataContinueForwardInclusive); + } else { + stmt = this._prep(sqlObjectDataContinueBackwardInclusive); + } + } else { + if (req.forward) { + stmt = this._prep(sqlObjectDataContinueForward); + } else { + stmt = this._prep(sqlObjectDataContinueBackward); + } + } + + const res = stmt.getFirst({ + object_store_id: req.objectStoreId, + x: req.targetKey, + }); + + if (!res) { + return null; + } + + assertDbInvariant(typeof res === "object"); + assertDbInvariant("rkey" in res); + const rkey = res.rkey; + if (!rkey) { + return null; + } + assertDbInvariant(rkey instanceof Uint8Array); + return rkey; + } + + _getObjectValue( + objectStoreId: number | bigint, + key: Uint8Array, + ): string | undefined { + const stmt = this._prep(sqlObjectDataValueFromKey); + const res = stmt.getFirst({ + object_store_id: objectStoreId, + key: key, + }); + if (!res) { + return undefined; + } + assertDbInvariant(typeof res === "object"); + assertDbInvariant("value" in res); + assertDbInvariant(typeof res.value === "string"); + return res.value; + } + + getObjectStoreMeta( + dbConn: DatabaseConnection, + objectStoreName: string, + ): ObjectStoreMeta | undefined { + // FIXME: Use cached info from the connection for this! 
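+    // Until such a cache exists, the metadata below is re-read from the
+    // object_stores and indexes tables on every call and returned as a
+    // fresh object, so callers may mutate the result freely.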
+ const connInfo = this.connectionMap.get(dbConn.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({ + name: objectStoreName, + database_name: connInfo.databaseName, + }); + if (!objRes) { + throw Error("object store not found"); + } + const objectStoreId = expectDbNumber(objRes, "id"); + const keyPath = deserializeKeyPath( + expectDbStringOrNull(objRes, "key_path"), + ); + const autoInc = expectDbNumber(objRes, "auto_increment"); + const indexSet: string[] = []; + const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({ + object_store_id: objectStoreId, + }); + for (const idxInfo of indexRes) { + const indexName = expectDbString(idxInfo, "name"); + indexSet.push(indexName); + } + return { + keyPath, + autoIncrement: autoInc != 0, + indexSet, + }; + } + + getIndexMeta( + dbConn: DatabaseConnection, + objectStoreName: string, + indexName: string, + ): IndexMeta | undefined { + // FIXME: Use cached info from the connection for this! + const connInfo = this.connectionMap.get(dbConn.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({ + name: objectStoreName, + database_name: connInfo.databaseName, + }); + if (!objRes) { + throw Error("object store not found"); + } + const objectStoreId = expectDbNumber(objRes, "id"); + const idxInfo = this._prep(sqlGetIndexByName).getFirst({ + object_store_id: objectStoreId, + name: indexName, + }); + const indexUnique = expectDbNumber(idxInfo, "unique_index"); + const indexMultiEntry = expectDbNumber(idxInfo, "multientry"); + const indexKeyPath = deserializeKeyPath( + expectDbString(idxInfo, "key_path"), + ); + if (!indexKeyPath) { + throw Error("db inconsistent"); + } + return { + keyPath: indexKeyPath, + multiEntry: indexMultiEntry != 0, + unique: indexUnique != 0, + }; + } + + async getDatabases(): Promise { + const dbList = this._prep(sqlListDatabases).getAll(); + let res: BridgeIDBDatabaseInfo[] = []; + for (const r of dbList) { + res.push({ + name: (r as any).name, + version: (r as any).version, + }); + } + + return res; + } + + private _loadObjectStoreNames(databaseName: string): string[] { + const objectStoreNames: string[] = []; + const storesRes = this._prep(sqlGetObjectStoresByDatabase).getAll({ + database_name: databaseName, + }); + for (const res of storesRes) { + assertDbInvariant(res != null && typeof res === "object"); + assertDbInvariant("name" in res); + const storeName = res.name; + assertDbInvariant(typeof storeName === "string"); + objectStoreNames.push(storeName); + } + return objectStoreNames; + } + + async connectDatabase(databaseName: string): Promise { + const connectionId = this.connectionIdCounter++; + const connectionCookie = `connection-${connectionId}`; + + // Wait until no transaction is active anymore. 
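+    // The backend allows only one active SQLite transaction at a time
+    // (tracked in txLevel), so connecting waits here before reading or
+    // initializing the database version in its own short transaction.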
+ while (1) { + if (this.txLevel == TransactionLevel.None) { + break; + } + await this.transactionDoneCond.wait(); + } + + this._prep(sqlBegin).run(); + const versionRes = this._prep(sqlGetDatabaseVersion).getFirst({ + name: databaseName, + }); + let ver: number; + if (versionRes == undefined) { + this._prep(sqlCreateDatabase).run({ name: databaseName }); + ver = 0; + } else { + const verNum = expectDbNumber(versionRes, "version"); + assertDbInvariant(typeof verNum === "number"); + ver = verNum; + } + const objectStoreNames: string[] = this._loadObjectStoreNames(databaseName); + + this._prep(sqlCommit).run(); + + this.connectionMap.set(connectionCookie, { + databaseName: databaseName, + }); + + return { + conn: { + connectionCookie, + }, + version: ver, + objectStores: objectStoreNames, + }; + } + + private _loadScopeInfo(connInfo: ConnectionInfo, storeName: string): void { + const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({ + name: storeName, + database_name: connInfo.databaseName, + }); + if (!objRes) { + throw Error("object store not found"); + } + const objectStoreId = expectDbNumber(objRes, "id"); + const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({ + object_store_id: objectStoreId, + }); + if (!indexRes) { + throw Error("db inconsistent"); + } + const indexMap = new Map(); + for (const idxInfo of indexRes) { + const indexId = expectDbNumber(idxInfo, "id"); + const indexName = expectDbString(idxInfo, "name"); + const indexUnique = expectDbNumber(idxInfo, "unique_index"); + const indexMultiEntry = expectDbNumber(idxInfo, "multientry"); + const indexKeyPath = deserializeKeyPath( + expectDbString(idxInfo, "key_path"), + ); + if (!indexKeyPath) { + throw Error("db inconsistent"); + } + indexMap.set(indexName, { + indexId, + keyPath: indexKeyPath, + multiEntry: indexMultiEntry != 0, + unique: indexUnique != 0, + }); + } + this.txScope.set(storeName, { + objectStoreId, + indexMap, + }); + } + + async beginTransaction( + conn: DatabaseConnection, + objectStores: string[], + mode: IDBTransactionMode, + ): Promise { + const connInfo = this.connectionMap.get(conn.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + const transactionCookie = `tx-${this.transactionIdCounter++}`; + + while (1) { + if (this.txLevel === TransactionLevel.None) { + break; + } + await this.transactionDoneCond.wait(); + } + + if (this.trackStats) { + if (mode === "readonly") { + this.accessStats.readTransactions++; + } else if (mode === "readwrite") { + this.accessStats.writeTransactions++; + } + } + + this._prep(sqlBegin).run(); + if (mode === "readonly") { + this.txLevel = TransactionLevel.Read; + } else if (mode === "readwrite") { + this.txLevel = TransactionLevel.Write; + } + + this.transactionMap.set(transactionCookie, { + connectionCookie: conn.connectionCookie, + }); + + // FIXME: We should check this + // if (this.txScope.size != 0) { + // // Something didn't clean up! + // throw Error("scope not empty"); + // } + this.txScope.clear(); + + // FIXME: Use cached info from connection? 
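+    // For now the per-transaction scope is rebuilt from the schema tables
+    // for every object store named in the transaction. As an illustration
+    // (values are hypothetical), txScope.get("books") could then hold
+    // { objectStoreId: 1, indexMap: Map { "by_author" => { indexId: 2, ... } } }.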
+ for (const storeName of objectStores) { + this._loadScopeInfo(connInfo, storeName); + } + + return { + transactionCookie, + }; + } + + async enterVersionChange( + conn: DatabaseConnection, + newVersion: number, + ): Promise { + const connInfo = this.connectionMap.get(conn.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.enableTracing) { + console.log( + `entering version change transaction (conn ${conn.connectionCookie}), newVersion=${newVersion}`, + ); + } + const transactionCookie = `tx-vc-${this.transactionIdCounter++}`; + + while (1) { + if (this.txLevel === TransactionLevel.None) { + break; + } + await this.transactionDoneCond.wait(); + } + + // FIXME: We should check this + // if (this.txScope.size != 0) { + // // Something didn't clean up! + // throw Error("scope not empty"); + // } + this.txScope.clear(); + + if (this.enableTracing) { + console.log(`version change transaction unblocked`); + } + + this._prep(sqlBegin).run(); + this.txLevel = TransactionLevel.VersionChange; + + this.transactionMap.set(transactionCookie, { + connectionCookie: conn.connectionCookie, + }); + + this._prep(sqlUpdateDbVersion).run({ + name: connInfo.databaseName, + version: newVersion, + }); + + const objectStoreNames = this._loadObjectStoreNames(connInfo.databaseName); + + // FIXME: Use cached info from connection? + for (const storeName of objectStoreNames) { + this._loadScopeInfo(connInfo, storeName); + } + + return { + transactionCookie, + }; + } + + async deleteDatabase(databaseName: string): Promise { + // FIXME: Wait until connection queue is not blocked + // FIXME: To properly implement the spec semantics, maybe + // split delete into prepareDelete and executeDelete? + + while (this.txLevel !== TransactionLevel.None) { + await this.transactionDoneCond.wait(); + } + + this._prep(sqlBegin).run(); + const objectStoreNames = this._loadObjectStoreNames(databaseName); + for (const storeName of objectStoreNames) { + const objRes = this._prep(sqlGetObjectStoreMetaByName).getFirst({ + name: storeName, + database_name: databaseName, + }); + if (!objRes) { + throw Error("object store not found"); + } + const objectStoreId = expectDbNumber(objRes, "id"); + const indexRes = this._prep(sqlGetIndexesByObjectStoreId).getAll({ + object_store_id: objectStoreId, + }); + if (!indexRes) { + throw Error("db inconsistent"); + } + const indexMap = new Map(); + for (const idxInfo of indexRes) { + const indexId = expectDbNumber(idxInfo, "id"); + const indexName = expectDbString(idxInfo, "name"); + const indexUnique = expectDbNumber(idxInfo, "unique_index"); + const indexMultiEntry = expectDbNumber(idxInfo, "multientry"); + const indexKeyPath = deserializeKeyPath( + expectDbString(idxInfo, "key_path"), + ); + if (!indexKeyPath) { + throw Error("db inconsistent"); + } + indexMap.set(indexName, { + indexId, + keyPath: indexKeyPath, + multiEntry: indexMultiEntry != 0, + unique: indexUnique != 0, + }); + } + this.txScope.set(storeName, { + objectStoreId, + indexMap, + }); + + for (const indexInfo of indexMap.values()) { + let stmt: Sqlite3Statement; + if (indexInfo.unique) { + stmt = this._prep(sqlIUniqueIndexDataDeleteAll); + } else { + stmt = this._prep(sqlIndexDataDeleteAll); + } + stmt.run({ + index_id: indexInfo.indexId, + }); + this._prep(sqlIndexDelete).run({ + index_id: indexInfo.indexId, + }); + } + this._prep(sqlObjectDataDeleteAll).run({ + object_store_id: objectStoreId, + }); + this._prep(sqlObjectStoreDelete).run({ + object_store_id: objectStoreId, + }); + } + 
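+    // At this point all index data, object data, index rows and object
+    // store rows of the database have been removed; deleting the
+    // databases row below and committing finishes the drop atomically.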
this._prep(sqlDeleteDatabase).run({ + name: databaseName, + }); + this._prep(sqlCommit).run(); + } + + async close(db: DatabaseConnection): Promise { + const connInfo = this.connectionMap.get(db.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + // FIXME: What if we're in a transaction? Does the backend interface allow this? + // if (this.txLevel !== TransactionLevel.None) { + // throw Error("can't close while in transaction"); + // } + if (this.enableTracing) { + console.log(`closing connection ${db.connectionCookie}`); + } + this.connectionMap.delete(db.connectionCookie); + } + + renameObjectStore( + btx: DatabaseTransaction, + oldName: string, + newName: string, + ): void { + if (this.enableTracing) { + console.log(`renaming object store '${oldName}' to '${newName}'`); + } + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction required"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("not connected"); + } + // FIXME: Would be much nicer with numeric UID handles + const scopeInfo = this.txScope.get(oldName); + if (!scopeInfo) { + throw Error("object store not found"); + } + this.txScope.delete(oldName); + this.txScope.set(newName, scopeInfo); + this._prep(sqlRenameObjectStore).run({ + object_store_id: scopeInfo.objectStoreId, + name: newName, + }); + } + + renameIndex( + btx: DatabaseTransaction, + objectStoreName: string, + oldIndexName: string, + newIndexName: string, + ): void { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction required"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("not connected"); + } + // FIXME: Would be much nicer with numeric UID handles + const scopeInfo = this.txScope.get(objectStoreName); + if (!scopeInfo) { + throw Error("object store not found"); + } + const indexInfo = scopeInfo.indexMap.get(oldIndexName); + if (!indexInfo) { + throw Error("index not found"); + } + // FIXME: Would also be much nicer with numeric UID handles + scopeInfo.indexMap.delete(oldIndexName); + scopeInfo.indexMap.set(newIndexName, indexInfo); + this._prep(sqlRenameIndex).run({ + index_id: indexInfo.indexId, + name: newIndexName, + }); + } + + deleteObjectStore(btx: DatabaseTransaction, name: string): void { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction required"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("not connected"); + } + // FIXME: Would be much nicer with numeric UID handles + const scopeInfo = this.txScope.get(name); + if (!scopeInfo) { + throw Error("object store not found"); + } + for (const indexInfo of scopeInfo.indexMap.values()) { + let stmt: Sqlite3Statement; + if (indexInfo.unique) { + stmt = this._prep(sqlIUniqueIndexDataDeleteAll); + } else { + stmt = this._prep(sqlIndexDataDeleteAll); + } + stmt.run({ + index_id: indexInfo.indexId, + }); + this._prep(sqlIndexDelete).run({ + index_id: indexInfo.indexId, + }); + } + this._prep(sqlObjectDataDeleteAll).run({ + object_store_id: scopeInfo.objectStoreId, + }); + this._prep(sqlObjectStoreDelete).run({ + object_store_id: scopeInfo.objectStoreId, + }); + this.txScope.delete(name); + } + + deleteIndex( + btx: DatabaseTransaction, + objectStoreName: string, + indexName: string, + ): void { + const txInfo = 
this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction required"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("not connected"); + } + // FIXME: Would be much nicer with numeric UID handles + const scopeInfo = this.txScope.get(objectStoreName); + if (!scopeInfo) { + throw Error("object store not found"); + } + const indexInfo = scopeInfo.indexMap.get(indexName); + if (!indexInfo) { + throw Error("index not found"); + } + scopeInfo.indexMap.delete(indexName); + let stmt: Sqlite3Statement; + if (indexInfo.unique) { + stmt = this._prep(sqlIUniqueIndexDataDeleteAll); + } else { + stmt = this._prep(sqlIndexDataDeleteAll); + } + stmt.run({ + index_id: indexInfo.indexId, + }); + this._prep(sqlIndexDelete).run({ + index_id: indexInfo.indexId, + }); + } + + async rollback(btx: DatabaseTransaction): Promise { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + if (this.enableTracing) { + console.log(`rolling back transaction ${btx.transactionCookie}`); + } + if (this.txLevel === TransactionLevel.None) { + return; + } + this._prep(sqlRollback).run(); + this.txLevel = TransactionLevel.None; + this.transactionMap.delete(btx.transactionCookie); + this.txScope.clear(); + this.transactionDoneCond.trigger(); + } + + async commit(btx: DatabaseTransaction): Promise { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + if (this.enableTracing) { + console.log(`committing transaction ${btx.transactionCookie}`); + } + if (this.txLevel === TransactionLevel.None) { + return; + } + this._prep(sqlCommit).run(); + this.txLevel = TransactionLevel.None; + this.txScope.clear(); + this.transactionMap.delete(btx.transactionCookie); + this.transactionDoneCond.trigger(); + } + + createObjectStore( + btx: DatabaseTransaction, + name: string, + keyPath: string | string[] | null, + autoIncrement: boolean, + ): void { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.txLevel < TransactionLevel.VersionChange) { + throw Error("only allowed in versionchange transaction"); + } + if (this.txScope.has(name)) { + throw Error("object store already exists"); + } + let myKeyPath = serializeKeyPath(keyPath); + const runRes = this._prep(sqlCreateObjectStore).run({ + name, + key_path: myKeyPath, + auto_increment: autoIncrement ? 
1 : 0, + database_name: connInfo.databaseName, + }); + this.txScope.set(name, { + objectStoreId: runRes.lastInsertRowid, + indexMap: new Map(), + }); + } + + createIndex( + btx: DatabaseTransaction, + indexName: string, + objectStoreName: string, + keyPath: string | string[], + multiEntry: boolean, + unique: boolean, + ): void { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.txLevel < TransactionLevel.VersionChange) { + throw Error("only allowed in versionchange transaction"); + } + const scopeInfo = this.txScope.get(objectStoreName); + if (!scopeInfo) { + throw Error("object store does not exist, can't create index"); + } + if (scopeInfo.indexMap.has(indexName)) { + throw Error("index already exists"); + } + + if (this.enableTracing) { + console.log(`creating index "${indexName}"`); + } + + const res = this._prep(sqlCreateIndex).run({ + object_store_id: scopeInfo.objectStoreId, + name: indexName, + key_path: serializeKeyPath(keyPath), + unique: unique ? 1 : 0, + multientry: multiEntry ? 1 : 0, + }); + const scopeIndexInfo: ScopeIndexInfo = { + indexId: res.lastInsertRowid, + keyPath, + multiEntry, + unique, + }; + scopeInfo.indexMap.set(indexName, scopeIndexInfo); + + // FIXME: We can't use an iterator here, as it's not allowed to + // execute a write statement while the iterator executes. + // Maybe do multiple selects instead of loading everything into memory? + const keyRowsRes = this._prep(sqlObjectDataGetAll).getAll({ + object_store_id: scopeInfo.objectStoreId, + }); + + for (const keyRow of keyRowsRes) { + assertDbInvariant(typeof keyRow === "object" && keyRow != null); + assertDbInvariant("key" in keyRow); + assertDbInvariant("value" in keyRow); + assertDbInvariant(typeof keyRow.value === "string"); + const key = keyRow.key; + const value = structuredRevive(JSON.parse(keyRow.value)); + assertDbInvariant(key instanceof Uint8Array); + try { + this.insertIntoIndex(scopeIndexInfo, key, value); + } catch (e) { + // FIXME: Catch this in insertIntoIndex! + if (e instanceof DataError) { + // https://www.w3.org/TR/IndexedDB-2/#object-store-storage-operation + // Do nothing + } else { + throw e; + } + } + } + } + + async deleteRecord( + btx: DatabaseTransaction, + objectStoreName: string, + range: BridgeIDBKeyRange, + ): Promise { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.txLevel < TransactionLevel.Write) { + throw Error("store operation only allowed while running a transaction"); + } + const scopeInfo = this.txScope.get(objectStoreName); + if (!scopeInfo) { + throw Error( + `object store ${JSON.stringify( + objectStoreName, + )} not in transaction scope`, + ); + } + + // PERF: We delete keys one-by-one here. + // Instead, we could do it with a single + // delete query for the object data / index data. 
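+    // Outline of the loop below: seed currKey at the lower bound of the
+    // range (or the first key in the store), then repeatedly delete the
+    // record plus its index entries and advance via _continueObjectKey
+    // until the key passes the range's upper bound.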
+ + let currKey: Uint8Array | null = null; + + if (range.lower != null) { + const targetKey = serializeKey(range.lower); + currKey = this._continueObjectKey({ + objectStoreId: scopeInfo.objectStoreId, + currentKey: null, + forward: true, + inclusive: true, + targetKey, + }); + } else { + currKey = this._startObjectKey(scopeInfo.objectStoreId, true); + } + + let upperBound: Uint8Array | undefined; + if (range.upper != null) { + upperBound = serializeKey(range.upper); + } + + // loop invariant: (currKey is undefined) or (currKey is a valid key) + while (true) { + if (!currKey) { + break; + } + + // FIXME: Check if we're past the range! + if (upperBound != null) { + const cmp = compareSerializedKeys(currKey, upperBound); + if (cmp > 0) { + break; + } + if (cmp == 0 && range.upperOpen) { + break; + } + } + + // Now delete! + + this._prep(sqlObjectDataDeleteKey).run({ + object_store_id: scopeInfo.objectStoreId, + key: currKey, + }); + + for (const index of scopeInfo.indexMap.values()) { + let stmt: Sqlite3Statement; + if (index.unique) { + stmt = this._prep(sqlUniqueIndexDataDeleteKey); + } else { + stmt = this._prep(sqlIndexDataDeleteKey); + } + stmt.run({ + index_id: index.indexId, + object_key: currKey, + }); + } + + currKey = this._continueObjectKey({ + objectStoreId: scopeInfo.objectStoreId, + currentKey: null, + forward: true, + inclusive: false, + targetKey: currKey, + }); + } + } + + async storeRecord( + btx: DatabaseTransaction, + storeReq: RecordStoreRequest, + ): Promise { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.txLevel < TransactionLevel.Write) { + throw Error("store operation only allowed while running a transaction"); + } + const scopeInfo = this.txScope.get(storeReq.objectStoreName); + if (!scopeInfo) { + throw Error( + `object store ${JSON.stringify( + storeReq.objectStoreName, + )} not in transaction scope`, + ); + } + const metaRes = this._prep(sqlGetObjectStoreMetaById).getFirst({ + id: scopeInfo.objectStoreId, + }); + if (metaRes === undefined) { + throw Error( + `object store ${JSON.stringify( + storeReq.objectStoreName, + )} does not exist`, + ); + } + assertDbInvariant(!!metaRes && typeof metaRes === "object"); + assertDbInvariant("key_path" in metaRes); + assertDbInvariant("auto_increment" in metaRes); + const dbKeyPath = metaRes.key_path; + assertDbInvariant(dbKeyPath === null || typeof dbKeyPath === "string"); + const keyPath = deserializeKeyPath(dbKeyPath); + const autoIncrement = metaRes.auto_increment; + assertDbInvariant(typeof autoIncrement === "number"); + + let key; + let value; + let updatedKeyGenerator: number | undefined; + + if (storeReq.storeLevel === StoreLevel.UpdateExisting) { + if (storeReq.key == null) { + throw Error("invalid update request (key not given)"); + } + key = storeReq.key; + value = storeReq.value; + } else { + if (keyPath != null && storeReq.key !== undefined) { + // If in-line keys are used, a key can't be explicitly specified. 
+ throw new DataError(); + } + + const storeKeyResult = makeStoreKeyValue({ + value: storeReq.value, + key: storeReq.key, + currentKeyGenerator: autoIncrement, + autoIncrement: autoIncrement != 0, + keyPath: keyPath, + }); + + if (autoIncrement != 0) { + updatedKeyGenerator = storeKeyResult.updatedKeyGenerator; + } + + key = storeKeyResult.key; + value = storeKeyResult.value; + } + + const serializedObjectKey = serializeKey(key); + + const existingObj = this._getObjectValue( + scopeInfo.objectStoreId, + serializedObjectKey, + ); + + if (storeReq.storeLevel === StoreLevel.NoOverwrite) { + if (existingObj) { + throw new ConstraintError(); + } + } + + this._prep(sqlInsertObjectData).run({ + object_store_id: scopeInfo.objectStoreId, + key: serializedObjectKey, + value: JSON.stringify(structuredEncapsulate(value)), + }); + + if (autoIncrement != 0) { + this._prep(sqlUpdateAutoIncrement).run({ + object_store_id: scopeInfo.objectStoreId, + auto_increment: updatedKeyGenerator, + }); + } + + for (const [k, indexInfo] of scopeInfo.indexMap.entries()) { + if (existingObj) { + this.deleteFromIndex( + indexInfo.indexId, + indexInfo.unique, + serializedObjectKey, + ); + } + + try { + this.insertIntoIndex(indexInfo, serializedObjectKey, value); + } catch (e) { + // FIXME: handle this in insertIntoIndex! + if (e instanceof DataError) { + // We don't propagate this error here. + continue; + } + throw e; + } + } + + if (this.trackStats) { + this.accessStats.writesPerStore[storeReq.objectStoreName] = + (this.accessStats.writesPerStore[storeReq.objectStoreName] ?? 0) + 1; + } + + return { + key: key, + }; + } + + private deleteFromIndex( + indexId: SqliteRowid, + indexUnique: boolean, + objectKey: Uint8Array, + ): void { + let stmt: Sqlite3Statement; + if (indexUnique) { + stmt = this._prep(sqlUniqueIndexDataDeleteKey); + } else { + stmt = this._prep(sqlIndexDataDeleteKey); + } + stmt.run({ + index_id: indexId, + object_key: objectKey, + }); + } + + private insertIntoIndex( + indexInfo: ScopeIndexInfo, + primaryKey: Uint8Array, + value: any, + ): void { + const indexKeys = getIndexKeys( + value, + indexInfo.keyPath, + indexInfo.multiEntry, + ); + if (!indexKeys.length) { + return; + } + + let stmt; + if (indexInfo.unique) { + stmt = this._prep(sqlInsertUniqueIndexData); + } else { + stmt = this._prep(sqlInsertIndexData); + } + + for (const indexKey of indexKeys) { + // FIXME: Re-throw correct error for unique index violations + const serializedIndexKey = serializeKey(indexKey); + try { + stmt.run({ + index_id: indexInfo.indexId, + object_key: primaryKey, + index_key: serializedIndexKey, + }); + } catch (e: any) { + if (e.code === SqliteError.constraintPrimarykey) { + throw new ConstraintError(); + } + throw e; + } + } + } + + clearObjectStore( + btx: DatabaseTransaction, + objectStoreName: string, + ): Promise { + const txInfo = this.transactionMap.get(btx.transactionCookie); + if (!txInfo) { + throw Error("transaction not found"); + } + const connInfo = this.connectionMap.get(txInfo.connectionCookie); + if (!connInfo) { + throw Error("connection not found"); + } + if (this.txLevel < TransactionLevel.Write) { + throw Error("store operation only allowed while running a transaction"); + } + const scopeInfo = this.txScope.get(objectStoreName); + if (!scopeInfo) { + throw Error( + `object store ${JSON.stringify( + objectStoreName, + )} not in transaction scope`, + ); + } + + throw new Error("Method not implemented."); + } +} + +const schemaSql = ` +CREATE TABLE IF NOT EXISTS databases +( name TEXT PRIMARY KEY +, 
version INTEGER NOT NULL +); + +CREATE TABLE IF NOT EXISTS object_stores +( id INTEGER PRIMARY KEY +, database_name NOT NULL +, name TEXT NOT NULL +, key_path TEXT +, auto_increment INTEGER NOT NULL DEFAULT 0 +, FOREIGN KEY (database_name) + REFERENCES databases(name) +); + +CREATE TABLE IF NOT EXISTS indexes +( id INTEGER PRIMARY KEY +, object_store_id INTEGER NOT NULL +, name TEXT NOT NULL +, key_path TEXT NOT NULL +, unique_index INTEGER NOT NULL +, multientry INTEGER NOT NULL +, FOREIGN KEY (object_store_id) + REFERENCES object_stores(id) +); + +CREATE TABLE IF NOT EXISTS object_data +( object_store_id INTEGER NOT NULL +, key BLOB NOT NULL +, value TEXT NOT NULL +, PRIMARY KEY (object_store_id, key) +); + +CREATE TABLE IF NOT EXISTS index_data +( index_id INTEGER NOT NULL +, index_key BLOB NOT NULL +, object_key BLOB NOT NULL +, PRIMARY KEY (index_id, index_key, object_key) +, FOREIGN KEY (index_id) + REFERENCES indexes(id) +); + +CREATE TABLE IF NOT EXISTS unique_index_data +( index_id INTEGER NOT NULL +, index_key BLOB NOT NULL +, object_key BLOB NOT NULL +, PRIMARY KEY (index_id, index_key) +, FOREIGN KEY (index_id) + REFERENCES indexes(id) +); +`; + +const sqlListDatabases = ` +SELECT name, version FROM databases; +`; + +const sqlGetDatabaseVersion = ` +SELECT version FROM databases WHERE name=$name; +`; + +const sqlBegin = `BEGIN;`; +const sqlCommit = `COMMIT;`; +const sqlRollback = `ROLLBACK;`; + +const sqlCreateDatabase = ` +INSERT INTO databases (name, version) VALUES ($name, 1); +`; + +const sqlDeleteDatabase = ` +DELETE FROM databases +WHERE name=$name; +`; + +const sqlCreateObjectStore = ` +INSERT INTO object_stores (name, database_name, key_path, auto_increment) + VALUES ($name, $database_name, $key_path, $auto_increment); +`; + +const sqlObjectStoreDelete = ` +DELETE FROM object_stores +WHERE id=$object_store_id;`; + +const sqlObjectDataDeleteAll = ` +DELETE FROM object_data +WHERE object_store_id=$object_store_id`; + +const sqlIndexDelete = ` +DELETE FROM indexes +WHERE id=$index_id; +`; + +const sqlIndexDataDeleteAll = ` +DELETE FROM index_data +WHERE index_id=$index_id; +`; + +const sqlIUniqueIndexDataDeleteAll = ` +DELETE FROM unique_index_data +WHERE index_id=$index_id; +`; + +const sqlCreateIndex = ` +INSERT INTO indexes (object_store_id, name, key_path, unique_index, multientry) + VALUES ($object_store_id, $name, $key_path, $unique, $multientry); +`; + +const sqlInsertIndexData = ` +INSERT INTO index_data (index_id, object_key, index_key) + VALUES ($index_id, $object_key, $index_key);`; + +const sqlInsertUniqueIndexData = ` +INSERT INTO unique_index_data (index_id, object_key, index_key) + VALUES ($index_id, $object_key, $index_key);`; + +const sqlUpdateDbVersion = ` +UPDATE databases + SET version=$version + WHERE name=$name; +`; + +const sqlRenameObjectStore = ` +UPDATE object_stores + SET name=$name + WHERE id=$object_store_id`; + +const sqlRenameIndex = ` +UPDATE indexes + SET name=$name + WHERE id=$index_id`; + +const sqlGetObjectStoresByDatabase = ` +SELECT id, name, key_path, auto_increment +FROM object_stores +WHERE database_name=$database_name; +`; + +const sqlGetObjectStoreMetaById = ` +SELECT key_path, auto_increment +FROM object_stores +WHERE id = $id; +`; + +const sqlGetObjectStoreMetaByName = ` +SELECT id, key_path, auto_increment +FROM object_stores +WHERE database_name=$database_name AND name=$name; +`; + +const sqlGetIndexesByObjectStoreId = ` +SELECT id, name, key_path, unique_index, multientry +FROM indexes +WHERE 
object_store_id=$object_store_id +`; + +const sqlGetIndexByName = ` +SELECT id, key_path, unique_index, multientry +FROM indexes +WHERE object_store_id=$object_store_id + AND name=$name +`; + +const sqlInsertObjectData = ` +INSERT OR REPLACE INTO object_data(object_store_id, key, value) + VALUES ($object_store_id, $key, $value); +`; + +const sqlUpdateAutoIncrement = ` +UPDATE object_stores + SET auto_increment=$auto_increment + WHERE id=$object_store_id +`; + +const sqlObjectDataValueFromKey = ` +SELECT value FROM object_data + WHERE object_store_id=$object_store_id + AND key=$key; +`; + +const sqlObjectDataGetAll = ` +SELECT key, value FROM object_data + WHERE object_store_id=$object_store_id;`; + +const sqlObjectDataStartForward = ` +SELECT min(key) as rkey FROM object_data + WHERE object_store_id=$object_store_id;`; + +const sqlObjectDataStartBackward = ` +SELECT max(key) as rkey FROM object_data + WHERE object_store_id=$object_store_id;`; + +const sqlObjectDataContinueForward = ` +SELECT min(key) as rkey FROM object_data + WHERE object_store_id=$object_store_id + AND key > $x;`; + +const sqlObjectDataContinueBackward = ` +SELECT max(key) as rkey FROM object_data + WHERE object_store_id=$object_store_id + AND key < $x;`; + +const sqlObjectDataContinueForwardInclusive = ` +SELECT min(key) as rkey FROM object_data + WHERE object_store_id=$object_store_id + AND key >= $x;`; + +const sqlObjectDataContinueBackwardInclusive = ` +SELECT max(key) as rkey FROM object_data + WHERE object_store_id=$object_store_id + AND key <= $x;`; + +const sqlObjectDataDeleteKey = ` +DELETE FROM object_data + WHERE object_store_id=$object_store_id AND + key=$key`; + +const sqlIndexDataDeleteKey = ` +DELETE FROM index_data + WHERE index_id=$index_id AND + object_key=$object_key; +`; + +const sqlUniqueIndexDataDeleteKey = ` +DELETE FROM unique_index_data + WHERE index_id=$index_id AND + object_key=$object_key; +`; + +// "next" or "nextunique" on a non-unique index +const sqlIndexDataStartForward = ` +SELECT index_key, object_key FROM index_data + WHERE index_id=$index_id + ORDER BY index_key, object_key + LIMIT 1; +`; + +// start a "next" or "nextunique" on a unique index +const sqlUniqueIndexDataStartForward = ` +SELECT index_key, object_key FROM unique_index_data + WHERE index_id=$index_id + ORDER BY index_key, object_key + LIMIT 1; +`; + +// start a "prev" or "prevunique" on a unique index +const sqlUniqueIndexDataStartBackward = ` +SELECT index_key, object_key FROM unique_index_data + WHERE index_id=$index_id + ORDER BY index_key DESC, object_key DESC + LIMIT 1 +`; + +// start a "prevunique" query on a non-unique index +const sqlIndexDataStartBackwardUnique = ` +SELECT index_key, object_key FROM index_data + WHERE index_id=$index_id + ORDER BY index_key DESC, object_key ASC + LIMIT 1 +`; + +// start a "prev" query on a non-unique index +const sqlIndexDataStartBackward = ` +SELECT index_key, object_key FROM index_data + WHERE index_id=$index_id + ORDER BY index_key DESC, object_key DESC + LIMIT 1 +`; + +// continue a "next" query, strictly go to a further key +const sqlIndexDataContinueForwardStrict = ` +SELECT index_key, object_key FROM index_data + WHERE + index_id=$index_id AND + ((index_key = $index_key AND object_key > $object_key) OR + (index_key > $index_key)) + ORDER BY index_key, object_key + LIMIT 1; +`; + +// continue a "next" query, go to at least the specified key +const sqlIndexDataContinueForwardInclusive = ` +SELECT index_key, object_key FROM index_data + WHERE + index_id=$index_id AND + 
((index_key = $index_key AND object_key >= $object_key) OR + (index_key > $index_key)) + ORDER BY index_key, object_key + LIMIT 1; +`; + +// continue a "prev" query +const sqlIndexDataContinueBackwardStrict = ` +SELECT index_key, object_key FROM index_data + WHERE + index_id=$index_id AND + ((index_key = $index_key AND object_key < $object_key) OR + (index_key < $index_key)) + ORDER BY index_key DESC, object_key DESC + LIMIT 1; +`; + +// continue a "prev" query +const sqlIndexDataContinueBackwardInclusive = ` +SELECT index_key, object_key FROM index_data + WHERE + index_id=$index_id AND + ((index_key = $index_key AND object_key <= $object_key) OR + (index_key < $index_key)) + ORDER BY index_key DESC, object_key DESC + LIMIT 1; +`; + +// continue a "prevunique" query +const sqlIndexDataContinueBackwardStrictUnique = ` +SELECT index_key, object_key FROM index_data + WHERE index_id=$index_id AND index_key < $index_key + ORDER BY index_key DESC, object_key ASC + LIMIT 1; +`; + +// continue a "prevunique" query +const sqlIndexDataContinueBackwardInclusiveUnique = ` +SELECT index_key, object_key FROM index_data + WHERE index_id=$index_id AND index_key <= $index_key + ORDER BY index_key DESC, object_key ASC + LIMIT 1; +`; + +// continue a "next" query, no target object key +const sqlIndexDataContinueForwardStrictUnique = ` +SELECT index_key, object_key FROM index_data + WHERE index_id=$index_id AND index_key > $index_key + ORDER BY index_key, object_key + LIMIT 1; +`; + +// continue a "next" query, no target object key +const sqlIndexDataContinueForwardInclusiveUnique = ` +SELECT index_key, object_key FROM index_data + WHERE index_id=$index_id AND index_key >= $index_key + ORDER BY index_key, object_key + LIMIT 1; +`; + +// continue a "next" query, strictly go to a further key +const sqlUniqueIndexDataContinueForwardStrict = ` +SELECT index_key, object_key FROM unique_index_data + WHERE index_id=$index_id AND index_key > $index_key + ORDER BY index_key, object_key + LIMIT 1; +`; + +// continue a "next" query, go to at least the specified key +const sqlUniqueIndexDataContinueForwardInclusive = ` +SELECT index_key, object_key FROM unique_index_data + WHERE index_id=$index_id AND index_key >= $index_key + ORDER BY index_key, object_key + LIMIT 1; +`; + +// continue a "prev" query +const sqlUniqueIndexDataContinueBackwardStrict = ` +SELECT index_key, object_key FROM unique_index_data + WHERE index_id=$index_id AND index_key < $index_key + ORDER BY index_key DESC, object_key DESC + LIMIT 1; +`; + +// continue a "prev" query +const sqlUniqueIndexDataContinueBackwardInclusive = ` +SELECT index_key, object_key FROM unique_index_data + WHERE index_id=$index_id AND index_key <= $index_key + ORDER BY index_key DESC, object_key DESC + LIMIT 1; +`; + +export interface SqliteBackendOptions { + filename: string; +} + +export async function createSqliteBackend( + sqliteImpl: Sqlite3Interface, + options: SqliteBackendOptions, +): Promise { + const db = sqliteImpl.open(options.filename); + db.exec("PRAGMA foreign_keys = ON;"); + db.exec(schemaSql); + return new SqliteBackend(sqliteImpl, db); +} diff --git a/packages/idb-bridge/src/backend-common.ts b/packages/idb-bridge/src/backend-common.ts new file mode 100644 index 000000000..d52071939 --- /dev/null +++ b/packages/idb-bridge/src/backend-common.ts @@ -0,0 +1,29 @@ +import { openPromise } from "./util/openPromise.js"; + +export class AsyncCondition { + _waitPromise: Promise; + _resolveWaitPromise: () => void; + constructor() { + const op = openPromise(); + 
this._waitPromise = op.promise; + this._resolveWaitPromise = op.resolve; + } + + wait(): Promise { + return this._waitPromise; + } + + trigger(): void { + this._resolveWaitPromise(); + const op = openPromise(); + this._waitPromise = op.promise; + this._resolveWaitPromise = op.resolve; + } +} + +export enum TransactionLevel { + None = 0, + Read = 1, + Write = 2, + VersionChange = 3, +} diff --git a/packages/idb-bridge/src/backend-interface.ts b/packages/idb-bridge/src/backend-interface.ts index a21515544..3255261e2 100644 --- a/packages/idb-bridge/src/backend-interface.ts +++ b/packages/idb-bridge/src/backend-interface.ts @@ -21,66 +21,45 @@ import { IDBValidKey, } from "./idbtypes.js"; -/** @public */ -export interface ObjectStoreProperties { - keyPath: string[] | null; - autoIncrement: boolean; - indexes: { [nameame: string]: IndexProperties }; -} - -/** @public */ -export interface IndexProperties { - keyPath: string[]; - multiEntry: boolean; - unique: boolean; -} - -/** @public */ -export interface Schema { - databaseName: string; - databaseVersion: number; - objectStores: { [name: string]: ObjectStoreProperties }; +export interface ConnectResult { + conn: DatabaseConnection; + version: number; + objectStores: string[]; } -/** @public */ export interface DatabaseConnection { connectionCookie: string; } -/** @public */ export interface DatabaseTransaction { transactionCookie: string; } -/** @public */ export enum ResultLevel { OnlyCount, OnlyKeys, Full, } -/** @public */ export enum StoreLevel { NoOverwrite, AllowOverwrite, UpdateExisting, } -/** @public */ -export interface RecordGetRequest { + +export interface IndexGetQuery { direction: IDBCursorDirection; objectStoreName: string; - indexName: string | undefined; + indexName: string; /** * The range of keys to return. - * If indexName is defined, the range refers to the index keys. - * Otherwise it refers to the object store keys. + * The range refers to the index keys. */ range: BridgeIDBKeyRange | undefined | null; /** * Last cursor position in terms of the index key. - * Can only be specified if indexName is defined and - * lastObjectStorePosition is defined. + * Can only be specified if lastObjectStorePosition is defined. * * Must either be undefined or within range. */ @@ -92,8 +71,6 @@ export interface RecordGetRequest { /** * If specified, the index key of the results must be * greater or equal to advanceIndexKey. - * - * Only applicable if indexName is specified. */ advanceIndexKey?: IDBValidKey; /** @@ -109,7 +86,31 @@ export interface RecordGetRequest { resultLevel: ResultLevel; } -/** @public */ +export interface ObjectStoreGetQuery { + direction: IDBCursorDirection; + objectStoreName: string; + /** + * The range of keys to return. + * Refers to the object store keys. + */ + range: BridgeIDBKeyRange | undefined | null; + /** + * Last position in terms of the object store key. + */ + lastObjectStorePosition?: IDBValidKey; + /** + * If specified, the primary key of the results must be greater + * or equal to advancePrimaryKey. + */ + advancePrimaryKey?: IDBValidKey; + /** + * Maximum number of results to return. 
+ * If 0, return all available results + */ + limit: number; + resultLevel: ResultLevel; +} + export interface RecordGetResponse { values: any[] | undefined; indexKeys: IDBValidKey[] | undefined; @@ -117,7 +118,6 @@ export interface RecordGetResponse { count: number; } -/** @public */ export interface RecordStoreRequest { objectStoreName: string; value: any; @@ -125,7 +125,6 @@ export interface RecordStoreRequest { storeLevel: StoreLevel; } -/** @public */ export interface RecordStoreResponse { /** * Key that the record was stored under in the object store. @@ -133,38 +132,79 @@ export interface RecordStoreResponse { key: IDBValidKey; } -/** @public */ +export interface ObjectStoreMeta { + indexSet: string[]; + keyPath: string | string[] | null; + autoIncrement: boolean; +} + +export interface IndexMeta { + keyPath: string | string[]; + multiEntry: boolean; + unique: boolean; +} + +// FIXME: Instead of refering to an object store by name, +// maybe refer to it via some internal, numeric ID? +// This would simplify renaming. export interface Backend { getDatabases(): Promise; - connectDatabase(name: string): Promise; + connectDatabase(name: string): Promise; beginTransaction( - conn: DatabaseConnection, + dbConn: DatabaseConnection, objectStores: string[], mode: IDBTransactionMode, ): Promise; enterVersionChange( - conn: DatabaseConnection, + dbConn: DatabaseConnection, newVersion: number, ): Promise; deleteDatabase(name: string): Promise; - close(db: DatabaseConnection): Promise; + close(dbConn: DatabaseConnection): Promise; - getSchema(db: DatabaseConnection): Schema; + // FIXME: Use this for connection + // prepareConnect() - acquires a lock, maybe enters a version change transaction? + // finishConnect() - after possible versionchange is done, allow others to connect - getCurrentTransactionSchema(btx: DatabaseTransaction): Schema; + /** + * Get metadata for an object store. + * + * When dbConn is running a version change transaction, + * the current schema (and not the initial schema) is returned. + * + * Caller may mutate the result, a new object + * is returned on each call. + */ + getObjectStoreMeta( + dbConn: DatabaseConnection, + objectStoreName: string, + ): ObjectStoreMeta | undefined; - getInitialTransactionSchema(btx: DatabaseTransaction): Schema; + /** + * Get metadata for an index. + * + * When dbConn is running a version change transaction, + * the current schema (and not the initial schema) is returned. + * + * Caller may mutate the result, a new object + * is returned on each call. 
+ */ + getIndexMeta( + dbConn: DatabaseConnection, + objectStoreName: string, + indexName: string, + ): IndexMeta | undefined; renameIndex( btx: DatabaseTransaction, objectStoreName: string, - oldName: string, - newName: string, + oldIndexName: string, + newIndexName: string, ): void; deleteIndex( @@ -173,8 +213,9 @@ export interface Backend { indexName: string, ): void; - rollback(btx: DatabaseTransaction): Promise; + rollback(btx: DatabaseTransaction): void; + // FIXME: Should probably not be async commit(btx: DatabaseTransaction): Promise; deleteObjectStore(btx: DatabaseTransaction, name: string): void; @@ -207,9 +248,14 @@ export interface Backend { range: BridgeIDBKeyRange, ): Promise; - getRecords( + getObjectStoreRecords( + btx: DatabaseTransaction, + req: ObjectStoreGetQuery, + ): Promise; + + getIndexRecords( btx: DatabaseTransaction, - req: RecordGetRequest, + req: IndexGetQuery, ): Promise; storeRecord( diff --git a/packages/idb-bridge/src/backends.test.ts b/packages/idb-bridge/src/backends.test.ts new file mode 100644 index 000000000..684358eac --- /dev/null +++ b/packages/idb-bridge/src/backends.test.ts @@ -0,0 +1,740 @@ +/* + Copyright 2019 Florian Dold + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + or implied. See the License for the specific language governing + permissions and limitations under the License. + */ + +/** + * Tests that are backend-generic. + * See testingdb.ts for the backend selection in test runs. + */ + +/** + * Imports. + */ +import test from "ava"; +import { + BridgeIDBCursorWithValue, + BridgeIDBDatabase, + BridgeIDBFactory, + BridgeIDBKeyRange, + BridgeIDBTransaction, +} from "./bridge-idb.js"; +import { + IDBCursorDirection, + IDBCursorWithValue, + IDBDatabase, + IDBKeyRange, + IDBRequest, + IDBValidKey, +} from "./idbtypes.js"; +import { initTestIndexedDB, useTestIndexedDb } from "./testingdb.js"; +import { MemoryBackend } from "./MemoryBackend.js"; +import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js"; + +test.before("test DB initialization", initTestIndexedDB); + +test("Spec: Example 1 Part 1", async (t) => { + const idb = useTestIndexedDb(); + + const dbname = "library-" + new Date().getTime() + Math.random(); + + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result as BridgeIDBDatabase; + const store = db.createObjectStore("books", { keyPath: "isbn" }); + const titleIndex = store.createIndex("by_title", "title", { unique: true }); + const authorIndex = store.createIndex("by_author", "author"); + + // Populate with initial data. 
+ store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); + store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); + store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); + }; + + await promiseFromRequest(request); + t.pass(); +}); + +test("Spec: Example 1 Part 2", async (t) => { + const idb = useTestIndexedDb(); + + const dbname = "library-" + new Date().getTime() + Math.random(); + + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result; + const store = db.createObjectStore("books", { keyPath: "isbn" }); + const titleIndex = store.createIndex("by_title", "title", { unique: true }); + const authorIndex = store.createIndex("by_author", "author"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + + t.is(db.name, dbname); + + const tx = db.transaction("books", "readwrite"); + tx.oncomplete = () => { + console.log("oncomplete called"); + }; + + const store = tx.objectStore("books"); + + store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); + store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); + store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); + + await promiseFromTransaction(tx); + + t.pass(); +}); + +test("duplicate index insertion", async (t) => { + const idb = useTestIndexedDb(); + + const dbname = "library-" + new Date().getTime() + Math.random(); + + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result; + const store = db.createObjectStore("books", { keyPath: "isbn" }); + const titleIndex = store.createIndex("by_title", "title", { unique: true }); + const authorIndex = store.createIndex("by_author", "author"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + + t.is(db.name, dbname); + + const tx = db.transaction("books", "readwrite"); + tx.oncomplete = () => { + console.log("oncomplete called"); + }; + + const store = tx.objectStore("books"); + + store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); + + // Change the index key, keep primary key (isbn) the same. 
+ store.put({ title: "Water Buffaloes", author: "Bla", isbn: 234567 }); + store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); + + store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); + + await promiseFromTransaction(tx); + + const tx3 = db.transaction(["books"], "readonly"); + const store3 = tx3.objectStore("books"); + const index3 = store3.index("by_author"); + const request3 = index3.openCursor(); + + const authorList: string[] = []; + + await promiseFromRequest(request3); + while (request3.result != null) { + const cursor: IDBCursorWithValue = request3.result; + authorList.push(cursor.value.author); + cursor.continue(); + await promiseFromRequest(request3); + } + + t.deepEqual(authorList, ["Barney", "Fred", "Fred"]); + + t.pass(); +}); + +test("simple index iteration", async (t) => { + const idb = useTestIndexedDb(); + const dbname = "library-" + new Date().getTime() + Math.random(); + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result; + const store = db.createObjectStore("books", { keyPath: "isbn" }); + const titleIndex = store.createIndex("by_title", "title", { unique: true }); + const authorIndex = store.createIndex("by_author", "author"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + const tx = db.transaction("books", "readwrite"); + const store = tx.objectStore("books"); + + store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); + store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); + store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); + + await promiseFromTransaction(tx); + + const tx3 = db.transaction(["books"], "readonly"); + const store3 = tx3.objectStore("books"); + const index3 = store3.index("by_author"); + const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred")); + + await promiseFromRequest(request3); + + let cursor: BridgeIDBCursorWithValue | null; + cursor = request3.result as BridgeIDBCursorWithValue; + t.is(cursor.value.author, "Fred"); + t.is(cursor.value.isbn, 123456); + + cursor.continue(); + + await promiseFromRequest(request3); + + t.is(cursor.value.author, "Fred"); + t.is(cursor.value.isbn, 234567); + + cursor.continue(); + + await promiseFromRequest(request3); + + t.is(cursor.value, undefined); +}); + +test("Spec: Example 1 Part 3", async (t) => { + const idb = useTestIndexedDb(); + const dbname = "library-" + new Date().getTime() + Math.random(); + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result; + const store = db.createObjectStore("books", { keyPath: "isbn" }); + const titleIndex = store.createIndex("by_title", "title", { unique: true }); + const authorIndex = store.createIndex("by_author", "author"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + + t.is(db.name, dbname); + + const tx = db.transaction("books", "readwrite"); + + const store = tx.objectStore("books"); + + store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); + store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); + store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); + + await promiseFromTransaction(tx); + + const tx2 = db.transaction("books", "readonly"); + const store2 = tx2.objectStore("books"); + var index2 = store2.index("by_title"); + const request2 = index2.get("Bedrock Nights"); + const result2: any = await promiseFromRequest(request2); + + t.is(result2.author, "Barney"); + + 
const tx3 = db.transaction(["books"], "readonly"); + const store3 = tx3.objectStore("books"); + const index3 = store3.index("by_author"); + const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred")); + + await promiseFromRequest(request3); + + let cursor: BridgeIDBCursorWithValue | null; + cursor = request3.result as BridgeIDBCursorWithValue; + t.is(cursor.value.author, "Fred"); + t.is(cursor.value.isbn, 123456); + + cursor.continue(); + + await promiseFromRequest(request3); + + cursor = request3.result as BridgeIDBCursorWithValue; + t.is(cursor.value.author, "Fred"); + t.is(cursor.value.isbn, 234567); + + await promiseFromTransaction(tx3); + + const tx4 = db.transaction("books", "readonly"); + const store4 = tx4.objectStore("books"); + const request4 = store4.openCursor(); + + await promiseFromRequest(request4); + + cursor = request4.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.isbn, 123456); + + cursor.continue(); + + await promiseFromRequest(request4); + + cursor = request4.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.isbn, 234567); + + cursor.continue(); + + await promiseFromRequest(request4); + + cursor = request4.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.isbn, 345678); + + cursor.continue(); + await promiseFromRequest(request4); + + cursor = request4.result; + + t.is(cursor, null); + + const tx5 = db.transaction("books", "readonly"); + const store5 = tx5.objectStore("books"); + const index5 = store5.index("by_author"); + + const request5 = index5.openCursor(null, "next"); + + await promiseFromRequest(request5); + cursor = request5.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.author, "Barney"); + cursor.continue(); + + await promiseFromRequest(request5); + cursor = request5.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.author, "Fred"); + cursor.continue(); + + await promiseFromRequest(request5); + cursor = request5.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.author, "Fred"); + cursor.continue(); + + await promiseFromRequest(request5); + cursor = request5.result; + t.is(cursor, null); + + const request6 = index5.openCursor(null, "nextunique"); + + await promiseFromRequest(request6); + cursor = request6.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.author, "Barney"); + cursor.continue(); + + await promiseFromRequest(request6); + cursor = request6.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.author, "Fred"); + t.is(cursor.value.isbn, 123456); + cursor.continue(); + + await promiseFromRequest(request6); + cursor = request6.result; + t.is(cursor, null); + + console.log("---------------------------"); + + const request7 = index5.openCursor(null, "prevunique"); + await promiseFromRequest(request7); + cursor = request7.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.author, "Fred"); + t.is(cursor.value.isbn, 123456); + cursor.continue(); + + await promiseFromRequest(request7); + cursor = request7.result; + if (!cursor) { + throw new Error(); + } + t.is(cursor.value.author, "Barney"); + cursor.continue(); + + await promiseFromRequest(request7); + cursor = request7.result; + t.is(cursor, null); + + db.close(); + + t.pass(); +}); + +test("simple deletion", async (t) => { + const idb = useTestIndexedDb(); + const dbname = "library-" + new Date().getTime() + Math.random(); + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = 
request.result; + const store = db.createObjectStore("books", { keyPath: "isbn" }); + const titleIndex = store.createIndex("by_title", "title", { unique: true }); + const authorIndex = store.createIndex("by_author", "author"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + const tx = db.transaction("books", "readwrite"); + tx.oncomplete = () => { + console.log("oncomplete called"); + }; + + const store = tx.objectStore("books"); + + store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); + store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); + store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); + + await promiseFromTransaction(tx); + + const tx2 = db.transaction("books", "readwrite"); + + const store2 = tx2.objectStore("books"); + + const req1 = store2.get(234567); + await promiseFromRequest(req1); + t.is(req1.readyState, "done"); + t.is(req1.result.author, "Fred"); + + store2.delete(123456); + + const req2 = store2.get(123456); + await promiseFromRequest(req2); + t.is(req2.readyState, "done"); + t.is(req2.result, undefined); + + const req3 = store2.get(234567); + await promiseFromRequest(req3); + t.is(req3.readyState, "done"); + t.is(req3.result.author, "Fred"); + + await promiseFromTransaction(tx2); + + t.pass(); +}); + +test("export", async (t) => { + const backend = new MemoryBackend(); + const idb = new BridgeIDBFactory(backend); + const dbname = "library-" + new Date().getTime() + Math.random(); + const request = idb.open(dbname, 42); + request.onupgradeneeded = () => { + const db = request.result; + const store = db.createObjectStore("books", { keyPath: "isbn" }); + const titleIndex = store.createIndex("by_title", "title", { unique: true }); + const authorIndex = store.createIndex("by_author", "author"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + + const tx = db.transaction("books", "readwrite"); + tx.oncomplete = () => { + console.log("oncomplete called"); + }; + + const store = tx.objectStore("books"); + + store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 }); + store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 }); + store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 }); + + await promiseFromTransaction(tx); + + const exportedData = backend.exportDump(); + const backend2 = new MemoryBackend(); + backend2.importDump(exportedData); + const exportedData2 = backend2.exportDump(); + + t.assert( + exportedData.databases[dbname].objectStores["books"].records.length === + 3, + ); + t.deepEqual(exportedData, exportedData2); + + t.is(exportedData.databases[dbname].schema.databaseVersion, 42); + t.is(exportedData2.databases[dbname].schema.databaseVersion, 42); + t.pass(); +}); + +test("update with non-existent index values", async (t) => { + const idb = useTestIndexedDb(); + const dbname = "mydb-" + new Date().getTime() + Math.random(); + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result; + const store = db.createObjectStore("bla", { keyPath: "x" }); + store.createIndex("by_y", "y"); + store.createIndex("by_z", "z"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + + t.is(db.name, dbname); + + { + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + store.put({ x: 0, y: "a", z: 42 }); + const index = store.index("by_z"); + const indRes = await promiseFromRequest(index.get(42)); + t.is(indRes.x, 0); + const res = await 
promiseFromRequest(store.get(0)); + t.is(res.z, 42); + await promiseFromTransaction(tx); + } + + { + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + store.put({ x: 0, y: "a" }); + const res = await promiseFromRequest(store.get(0)); + t.is(res.z, undefined); + await promiseFromTransaction(tx); + } + + { + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + const index = store.index("by_z"); + { + const indRes = await promiseFromRequest(index.get(42)); + t.is(indRes, undefined); + } + const res = await promiseFromRequest(store.get(0)); + t.is(res.z, undefined); + await promiseFromTransaction(tx); + } + + t.pass(); +}); + +test("delete from unique index", async (t) => { + const idb = useTestIndexedDb(); + const dbname = "mydb-" + new Date().getTime() + Math.random(); + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result as IDBDatabase; + const store = db.createObjectStore("bla", { keyPath: "x" }); + store.createIndex("by_yz", ["y", "z"], { + unique: true, + }); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + + t.is(db.name, dbname); + + { + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + store.put({ x: 0, y: "a", z: 42 }); + const index = store.index("by_yz"); + const indRes = await promiseFromRequest(index.get(["a", 42])); + t.is(indRes.x, 0); + const res = await promiseFromRequest(store.get(0)); + t.is(res.z, 42); + await promiseFromTransaction(tx); + } + + { + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + store.put({ x: 0, y: "a", z: 42, extra: 123 }); + await promiseFromTransaction(tx); + } + + t.pass(); +}); + +test("range queries", async (t) => { + const idb = useTestIndexedDb(); + const dbname = "mydb-" + new Date().getTime() + Math.random(); + const request = idb.open(dbname); + request.onupgradeneeded = () => { + const db = request.result; + const store = db.createObjectStore("bla", { keyPath: "x" }); + store.createIndex("by_y", "y"); + store.createIndex("by_z", "z"); + }; + + const db: BridgeIDBDatabase = await promiseFromRequest(request); + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + + store.put({ x: 0, y: "a" }); + store.put({ x: 2, y: "a" }); + store.put({ x: 4, y: "b" }); + store.put({ x: 8, y: "b" }); + store.put({ x: 10, y: "c" }); + store.put({ x: 12, y: "c" }); + + await promiseFromTransaction(tx); + + async function doCursorStoreQuery( + range: IDBKeyRange | IDBValidKey | undefined, + direction: IDBCursorDirection | undefined, + expected: any[], + ): Promise { + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + const vals: any[] = []; + + const req = store.openCursor(range, direction); + while (1) { + await promiseFromRequest(req); + const cursor: IDBCursorWithValue = req.result; + if (!cursor) { + break; + } + cursor.continue(); + vals.push(cursor.value); + } + + await promiseFromTransaction(tx); + + t.deepEqual(vals, expected); + } + + async function doCursorIndexQuery( + range: IDBKeyRange | IDBValidKey | undefined, + direction: IDBCursorDirection | undefined, + expected: any[], + ): Promise { + const tx = db.transaction("bla", "readwrite"); + const store = tx.objectStore("bla"); + const index = store.index("by_y"); + const vals: any[] = []; + + const req = index.openCursor(range, direction); + while (1) { + await promiseFromRequest(req); + const cursor: 
IDBCursorWithValue = req.result; + if (!cursor) { + break; + } + cursor.continue(); + vals.push(cursor.value); + } + + await promiseFromTransaction(tx); + + t.deepEqual(vals, expected); + } + + await doCursorStoreQuery(undefined, undefined, [ + { + x: 0, + y: "a", + }, + { + x: 2, + y: "a", + }, + { + x: 4, + y: "b", + }, + { + x: 8, + y: "b", + }, + { + x: 10, + y: "c", + }, + { + x: 12, + y: "c", + }, + ]); + + await doCursorStoreQuery( + BridgeIDBKeyRange.bound(0, 12, true, true), + undefined, + [ + { + x: 2, + y: "a", + }, + { + x: 4, + y: "b", + }, + { + x: 8, + y: "b", + }, + { + x: 10, + y: "c", + }, + ], + ); + + await doCursorIndexQuery( + BridgeIDBKeyRange.bound("a", "c", true, true), + undefined, + [ + { + x: 4, + y: "b", + }, + { + x: 8, + y: "b", + }, + ], + ); + + await doCursorIndexQuery(undefined, "nextunique", [ + { + x: 0, + y: "a", + }, + { + x: 4, + y: "b", + }, + { + x: 10, + y: "c", + }, + ]); + + await doCursorIndexQuery(undefined, "prevunique", [ + { + x: 10, + y: "c", + }, + { + x: 4, + y: "b", + }, + { + x: 0, + y: "a", + }, + ]); + + db.close(); + + t.pass(); +}); diff --git a/packages/idb-bridge/src/bridge-idb.ts b/packages/idb-bridge/src/bridge-idb.ts index 128a6900d..8cecba534 100644 --- a/packages/idb-bridge/src/bridge-idb.ts +++ b/packages/idb-bridge/src/bridge-idb.ts @@ -17,12 +17,16 @@ import { Backend, + ConnectResult, DatabaseConnection, DatabaseTransaction, - RecordGetRequest, + IndexGetQuery, + IndexMeta, + ObjectStoreGetQuery, + ObjectStoreMeta, + RecordGetResponse, RecordStoreRequest, ResultLevel, - Schema, StoreLevel, } from "./backend-interface.js"; import { @@ -57,10 +61,7 @@ import { TransactionInactiveError, VersionError, } from "./util/errors.js"; -import { - FakeDOMStringList, - fakeDOMStringList, -} from "./util/fakeDOMStringList.js"; +import { fakeDOMStringList } from "./util/fakeDOMStringList.js"; import FakeEvent from "./util/FakeEvent.js"; import FakeEventTarget from "./util/FakeEventTarget.js"; import { makeStoreKeyValue } from "./util/makeStoreKeyValue.js"; @@ -71,17 +72,14 @@ import { checkStructuredCloneOrThrow } from "./util/structuredClone.js"; import { validateKeyPath } from "./util/validateKeyPath.js"; import { valueToKey } from "./util/valueToKey.js"; -/** @public */ export type CursorSource = BridgeIDBIndex | BridgeIDBObjectStore; -/** @public */ export interface RequestObj { operation: () => Promise; request?: BridgeIDBRequest | undefined; source?: any; } -/** @public */ export interface BridgeIDBDatabaseInfo { name: string; version: number; @@ -101,8 +99,6 @@ function simplifyRange( /** * http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#cursor - * - * @public */ export class BridgeIDBCursor implements IDBCursor { _request: BridgeIDBRequest | undefined; @@ -207,29 +203,56 @@ export class BridgeIDBCursor implements IDBCursor { ); BridgeIDBFactory.enableTracing && console.log("cursor type ", this.toString()); - const isIndex = this._indexName !== undefined; - const recordGetRequest: RecordGetRequest = { - direction: this.direction, - indexName: this._indexName, - lastIndexPosition: this._indexPosition, - lastObjectStorePosition: this._objectStorePosition, - limit: 1, - range: simplifyRange(this._range), - objectStoreName: this._objectStoreName, - advanceIndexKey: isIndex ? key : undefined, - advancePrimaryKey: isIndex ? primaryKey : key, - resultLevel: this._keyOnly ? 
ResultLevel.OnlyKeys : ResultLevel.Full, - }; + const indexName = this._indexName; const { btx } = this.source._confirmStartedBackendTransaction(); - let response = await this._backend.getRecords(btx, recordGetRequest); + let response: RecordGetResponse; + + if (indexName != null) { + const indexRecordGetRequest: IndexGetQuery = { + direction: this.direction, + indexName: indexName, + lastIndexPosition: this._indexPosition, + lastObjectStorePosition: this._objectStorePosition, + limit: 1, + range: simplifyRange(this._range), + objectStoreName: this._objectStoreName, + advanceIndexKey: key, + advancePrimaryKey: primaryKey, + resultLevel: this._keyOnly ? ResultLevel.OnlyKeys : ResultLevel.Full, + }; + response = await this._backend.getIndexRecords( + btx, + indexRecordGetRequest, + ); + } else { + if (primaryKey != null) { + // Only allowed for index cursors + throw new InvalidAccessError(); + } + const objStoreGetRequest: ObjectStoreGetQuery = { + direction: this.direction, + lastObjectStorePosition: this._objectStorePosition, + limit: 1, + range: simplifyRange(this._range), + objectStoreName: this._objectStoreName, + advancePrimaryKey: key, + resultLevel: this._keyOnly ? ResultLevel.OnlyKeys : ResultLevel.Full, + }; + response = await this._backend.getObjectStoreRecords( + btx, + objStoreGetRequest, + ); + } if (response.count === 0) { if (BridgeIDBFactory.enableTracing) { console.log("cursor is returning empty result"); } this._gotValue = false; + this._key = undefined; + this._value = undefined; return null; } @@ -237,11 +260,6 @@ export class BridgeIDBCursor implements IDBCursor { throw Error("invariant failed"); } - if (BridgeIDBFactory.enableTracing) { - console.log("request is:", JSON.stringify(recordGetRequest)); - console.log("get response is:", JSON.stringify(response)); - } - if (this._indexName !== undefined) { this._key = response.indexKeys![0]; } else { @@ -550,7 +568,6 @@ const confirmActiveVersionchangeTransaction = (database: BridgeIDBDatabase) => { }; // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#database-interface -/** @public */ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { _closePending = false; _closed = false; @@ -561,7 +578,16 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { _backendConnection: DatabaseConnection; _backend: Backend; - _schema: Schema; + _name: string; + + _initialVersion: number; + + _version: number; + + // "object store set" from the spec + _objectStoreSet: string[]; + + // _schema: Schema; /** * Name that can be set to identify the object store in logs. 
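The hunks around this point replace the cached Schema object on BridgeIDBDatabase with explicit _name, _version, _initialVersion and _objectStoreSet fields, where _objectStoreSet is the spec's sorted "object store set". A minimal TypeScript sketch of that bookkeeping follows; it is illustrative only and not part of the commit, and the helper names are assumptions rather than identifiers from the patch.

// Illustrative sketch, not part of the commit: maintaining a sorted
// "object store set", mirroring createObjectStore / deleteObjectStore /
// renameObjectStore in the hunks below.
function addStoreName(objectStoreSet: string[], name: string): void {
  // createObjectStore: push the new name and keep the set sorted.
  objectStoreSet.push(name);
  objectStoreSet.sort();
}

function removeStoreName(objectStoreSet: string[], name: string): void {
  // deleteObjectStore: drop the name from the set.
  const idx = objectStoreSet.indexOf(name);
  if (idx >= 0) {
    objectStoreSet.splice(idx, 1);
  }
}

function renameStoreName(
  objectStoreSet: string[],
  oldName: string,
  newName: string,
): void {
  // renameObjectStore: replace the entry in place, then re-sort.
  const idx = objectStoreSet.indexOf(oldName);
  if (idx >= 0) {
    objectStoreSet[idx] = newName;
    objectStoreSet.sort();
  }
}

With the set maintained this way, objectStoreNames can be derived directly from it (wrapped in a DOMStringList), as the following hunk does.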
@@ -569,17 +595,15 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { _debugName: string | undefined = undefined; get name(): string { - return this._schema.databaseName; + return this._name; } get version(): number { - return this._schema.databaseVersion; + return this._version; } get objectStoreNames(): DOMStringList { - return fakeDOMStringList( - Object.keys(this._schema.objectStores), - ).sort() as DOMStringList; + return fakeDOMStringList([...this._objectStoreSet]).sort() as DOMStringList; } /** @@ -606,13 +630,13 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { } } - constructor(backend: Backend, backendConnection: DatabaseConnection) { + constructor(name: string, backend: Backend, connResult: ConnectResult) { super(); - - this._schema = backend.getSchema(backendConnection); - + this._name = name; + this._version = this._initialVersion = connResult.version; this._backend = backend; - this._backendConnection = backendConnection; + this._backendConnection = connResult.conn; + this._objectStoreSet = connResult.objectStores; } // http://w3c.github.io/IndexedDB/#dom-idbdatabase-createobjectstore @@ -645,7 +669,8 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { validateKeyPath(keyPath); } - if (Object.keys(this._schema.objectStores).includes(name)) { + if (this._objectStoreSet.includes(name)) { + // Already exists throw new ConstraintError(); } @@ -660,7 +685,9 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { autoIncrement, ); - this._schema = this._backend.getCurrentTransactionSchema(backendTx); + transaction._scope.add(name); + this._objectStoreSet.push(name); + this._objectStoreSet.sort(); const newObjectStore = transaction.objectStore(name); newObjectStore._justCreated = true; @@ -682,6 +709,10 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { os._deleted = true; transaction._objectStoresCache.delete(name); } + transaction._cachedObjectStoreNames = undefined; + transaction._scope.delete(name); + const nameIdx = this._objectStoreSet.indexOf(name); + this._objectStoreSet.splice(nameIdx, 1); } public _internalTransaction( @@ -766,10 +797,8 @@ export class BridgeIDBDatabase extends FakeEventTarget implements IDBDatabase { } } -/** @public */ export type DatabaseList = Array<{ name: string; version: number }>; -/** @public */ export class BridgeIDBFactory { public cmp = compareKeys; private backend: Backend; @@ -810,8 +839,10 @@ export class BridgeIDBFactory { }); request.dispatchEvent(event2); } catch (err: any) { - request.error = new Error(); - request.error.name = err.name; + const myErr = new Error(); + myErr.name = err.name; + myErr.message = err.message; + request.error = myErr; request.readyState = "done"; const event = new FakeEvent("error", { @@ -841,27 +872,26 @@ export class BridgeIDBFactory { const request = new BridgeIDBOpenDBRequest(); queueTask(async () => { - let dbconn: DatabaseConnection; + let dbConnRes: ConnectResult; try { if (BridgeIDBFactory.enableTracing) { console.log("TRACE: connecting to database"); } - dbconn = await this.backend.connectDatabase(name); + dbConnRes = await this.backend.connectDatabase(name); if (BridgeIDBFactory.enableTracing) { console.log("TRACE: connected!"); } } catch (err: any) { if (BridgeIDBFactory.enableTracing) { console.log( - "TRACE: caught exception while trying to connect with backend", + "TRACE: caught exception while trying to connect with backend:", + err, ); } 
request._finishWithError(err); return; } - - const schema = this.backend.getSchema(dbconn); - const existingVersion = schema.databaseVersion; + const existingVersion = dbConnRes.version; if (version === undefined) { version = existingVersion !== 0 ? existingVersion : 1; @@ -879,7 +909,7 @@ export class BridgeIDBFactory { return; } - const db = new BridgeIDBDatabase(this.backend, dbconn); + const db = new BridgeIDBDatabase(name, this.backend, dbConnRes); if (existingVersion == requestedVersion) { request.result = db; @@ -929,16 +959,14 @@ export class BridgeIDBFactory { } const backendTransaction = await this.backend.enterVersionChange( - dbconn, + dbConnRes.conn, requestedVersion, ); // We need to expose the new version number to the upgrade transaction. - db._schema = - this.backend.getCurrentTransactionSchema(backendTransaction); - + db._version = version; const transaction = db._internalTransaction( - [], + dbConnRes.objectStores, "versionchange", backendTransaction, request, @@ -1030,37 +1058,48 @@ export class BridgeIDBFactory { } // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#idl-def-IDBIndex -/** @public */ export class BridgeIDBIndex implements IDBIndex { _objectStore: BridgeIDBObjectStore; + _indexMeta: IndexMeta; + _originalName: string | undefined = undefined; + _deleted: boolean = false; + _name: string; + + /** + * Was this index newly created in the current transaction? + */ + _justCreated: boolean = false; get objectStore(): IDBObjectStore { return this._objectStore; } - get _schema(): Schema { - return this._objectStore._transaction._db._schema; - } - get keyPath(): IDBKeyPath | IDBKeyPath[] { - return this._schema.objectStores[this._objectStore.name].indexes[this._name] - .keyPath; + return this._indexMeta.keyPath; } get multiEntry(): boolean { - return this._schema.objectStores[this._objectStore.name].indexes[this._name] - .multiEntry; + return this._indexMeta.multiEntry; } get unique(): boolean { - return this._schema.objectStores[this._objectStore.name].indexes[this._name] - .unique; + return this._indexMeta.multiEntry; } get _backend(): Backend { return this._objectStore._backend; } + constructor( + objectStore: BridgeIDBObjectStore, + name: string, + indexMeta: IndexMeta, + ) { + this._name = name; + this._objectStore = objectStore; + this._indexMeta = indexMeta; + } + _confirmStartedBackendTransaction(): { btx: DatabaseTransaction } { return this._objectStore._confirmStartedBackendTransaction(); } @@ -1069,20 +1108,6 @@ export class BridgeIDBIndex implements IDBIndex { this._objectStore._confirmActiveTransaction(); } - private _name: string; - - public _deleted: boolean = false; - - /** - * Was this index newly created in the current transaction? 
- */ - _justCreated: boolean = false; - - constructor(objectStore: BridgeIDBObjectStore, name: string) { - this._name = name; - this._objectStore = objectStore; - } - get name() { return this._name; } @@ -1107,18 +1132,39 @@ export class BridgeIDBIndex implements IDBIndex { if (newName === oldName) { return; } - + if (this._originalName != null) { + this._originalName = oldName; + } this._backend.renameIndex(btx, this._objectStore.name, oldName, newName); + this._applyNameChange(oldName, newName); + if (this._objectStore._objectStoreMeta.indexSet.indexOf(name) >= 0) { + throw new Error("internal invariant violated"); + } + } - this._objectStore._transaction._db._schema = - this._backend.getCurrentTransactionSchema(btx); - - this._objectStore._indexesCache.delete(oldName); - this._objectStore._indexesCache.set(newName, this); + _applyNameChange(oldName: string, newName: string) { + this._objectStore._indexHandlesCache.delete(oldName); + this._objectStore._indexHandlesCache.set(newName, this); + const indexSet = this._objectStore._objectStoreMeta.indexSet; + const indexIdx = indexSet.indexOf(oldName); + indexSet[indexIdx] = newName; + indexSet.sort(); this._name = newName; + } - if (this._objectStore._indexNames.indexOf(name) >= 0) { - throw new Error("internal invariant violated"); + _applyDelete() { + this._objectStore._indexHandlesCache.delete(this._name); + const indexSet = this._objectStore._objectStoreMeta.indexSet; + const indexIdx = indexSet.indexOf(this._name); + indexSet.splice(indexIdx, 1); + } + + _abort() { + if (this._originalName != null) { + this._applyNameChange(this._name, this._originalName); + } + if (this._justCreated) { + this._deleted = true; } } @@ -1199,34 +1245,23 @@ export class BridgeIDBIndex implements IDBIndex { } private _confirmIndexExists() { - const storeSchema = this._schema.objectStores[this._objectStore._name]; - if (!storeSchema) { - throw new InvalidStateError( - `no schema for object store '${this._objectStore._name}'`, - ); - } - if (!storeSchema.indexes[this._name]) { - throw new InvalidStateError( - `no schema for index '${this._name}' of object store '${this._objectStore._name}'`, - ); - } - } - - get(key: BridgeIDBKeyRange | IDBValidKey) { if (this._deleted) { throw new InvalidStateError(); } if (this._objectStore._deleted) { throw new InvalidStateError(); } - this._confirmActiveTransaction(); + } + + get(key: BridgeIDBKeyRange | IDBValidKey) { this._confirmIndexExists(); + this._confirmActiveTransaction(); if (!(key instanceof BridgeIDBKeyRange)) { key = BridgeIDBKeyRange._valueToKeyRange(key); } - const getReq: RecordGetRequest = { + const getReq: IndexGetQuery = { direction: "next", indexName: this._name, limit: 1, @@ -1237,7 +1272,7 @@ export class BridgeIDBIndex implements IDBIndex { const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, getReq); + const result = await this._backend.getIndexRecords(btx, getReq); if (result.count == 0) { return undefined; } @@ -1273,7 +1308,7 @@ export class BridgeIDBIndex implements IDBIndex { count = -1; } - const getReq: RecordGetRequest = { + const getReq: IndexGetQuery = { direction: "next", indexName: this._name, limit: count, @@ -1284,7 +1319,7 @@ export class BridgeIDBIndex implements IDBIndex { const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, getReq); + const result = await this._backend.getIndexRecords(btx, getReq); const 
values = result.values; if (!values) { throw Error("invariant violated"); @@ -1307,7 +1342,7 @@ export class BridgeIDBIndex implements IDBIndex { key = BridgeIDBKeyRange._valueToKeyRange(key); } - const getReq: RecordGetRequest = { + const getReq: IndexGetQuery = { direction: "next", indexName: this._name, limit: 1, @@ -1318,7 +1353,7 @@ export class BridgeIDBIndex implements IDBIndex { const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, getReq); + const result = await this._backend.getIndexRecords(btx, getReq); if (result.count == 0) { return undefined; } @@ -1351,7 +1386,7 @@ export class BridgeIDBIndex implements IDBIndex { count = -1; } - const getReq: RecordGetRequest = { + const getReq: IndexGetQuery = { direction: "next", indexName: this._name, limit: count, @@ -1362,7 +1397,7 @@ export class BridgeIDBIndex implements IDBIndex { const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, getReq); + const result = await this._backend.getIndexRecords(btx, getReq); const primaryKeys = result.primaryKeys; if (!primaryKeys) { throw Error("invariant violated"); @@ -1388,7 +1423,7 @@ export class BridgeIDBIndex implements IDBIndex { key = BridgeIDBKeyRange.only(valueToKey(key)); } - const getReq: RecordGetRequest = { + const getReq: IndexGetQuery = { direction: "next", indexName: this._name, limit: 1, @@ -1399,7 +1434,7 @@ export class BridgeIDBIndex implements IDBIndex { const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, getReq); + const result = await this._backend.getIndexRecords(btx, getReq); return result.count; }; @@ -1415,7 +1450,6 @@ export class BridgeIDBIndex implements IDBIndex { } // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#range-concept -/** @public */ export class BridgeIDBKeyRange { public static only(value: IDBValidKey) { if (arguments.length === 0) { @@ -1525,10 +1559,8 @@ export class BridgeIDBKeyRange { } // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#object-store -/** @public */ export class BridgeIDBObjectStore implements IDBObjectStore { - _indexesCache: Map = new Map(); - + _indexHandlesCache: Map = new Map(); _transaction: BridgeIDBTransaction; /** @@ -1536,41 +1568,43 @@ export class BridgeIDBObjectStore implements IDBObjectStore { */ _debugName: string | undefined = undefined; + // Was the object store (not the handle, but the underlying store) + // created in this upgrade transaction? _justCreated: boolean = false; + _originalName: string | undefined = undefined; + _objectStoreMeta: ObjectStoreMeta; + get transaction(): IDBTransaction { return this._transaction; } get autoIncrement(): boolean { - return this._schema.objectStores[this._name].autoIncrement; - } - - get _indexNames(): FakeDOMStringList { - return fakeDOMStringList( - Object.keys(this._schema.objectStores[this._name].indexes), - ).sort(); + return this._objectStoreMeta.autoIncrement; } get indexNames(): DOMStringList { - return this._indexNames as DOMStringList; + return fakeDOMStringList([...this._objectStoreMeta.indexSet]); } get keyPath(): IDBKeyPath | IDBKeyPath[] { - return this._schema.objectStores[this._name].keyPath!; + // Bug in th official type declarations. The spec + // allows returning null here. 
+ return this._objectStoreMeta.keyPath!; } _name: string; - get _schema(): Schema { - return this._transaction._db._schema; - } - _deleted: boolean = false; - constructor(transaction: BridgeIDBTransaction, name: string) { + constructor( + transaction: BridgeIDBTransaction, + name: string, + objectStoreMeta: ObjectStoreMeta, + ) { this._name = name; this._transaction = transaction; + this._objectStoreMeta = objectStoreMeta; } get name() { @@ -1620,26 +1654,56 @@ export class BridgeIDBObjectStore implements IDBObjectStore { let { btx } = this._confirmStartedBackendTransaction(); newName = String(newName); - const oldName = this._name; - if (newName === oldName) { return; } - + if (this._originalName == null) { + this._originalName = this._name; + } this._backend.renameObjectStore(btx, oldName, newName); - this._transaction._db._schema = - this._backend.getCurrentTransactionSchema(btx); + this._applyNameChange(oldName, newName); + } + _applyNameChange(oldName: string, newName: string) { + this._transaction._scope.delete(oldName); + this._transaction._scope.add(newName); // We don't modify scope, as the scope of the transaction // doesn't matter if we're in an upgrade transaction. this._transaction._objectStoresCache.delete(oldName); this._transaction._objectStoresCache.set(newName, this); this._transaction._cachedObjectStoreNames = undefined; - + const objectStoreSet = this._transaction._db._objectStoreSet; + const oldIdx = objectStoreSet.indexOf(oldName); + objectStoreSet[oldIdx] = newName; + objectStoreSet.sort(); this._name = newName; } + _applyDelete() { + this._deleted = true; + this._transaction._objectStoresCache.delete(this._name); + this._transaction._cachedObjectStoreNames = undefined; + const objectStoreSet = this._transaction._db._objectStoreSet; + const oldIdx = objectStoreSet.indexOf(this._name); + objectStoreSet.splice(oldIdx, 1); + } + + /** + * Roll back changes to the handle after an abort. + */ + _abort() { + if (this._originalName != null) { + this._applyNameChange(this._name, this._originalName); + } + if (this._justCreated) { + this._applyDelete(); + } + } + + /** + * "To add or put with handle, value, key, and no-overwrite flag, run these steps:" + */ public _store(value: any, key: IDBValidKey | undefined, overwrite: boolean) { if (BridgeIDBFactory.enableTracing) { console.log( @@ -1647,6 +1711,12 @@ export class BridgeIDBObjectStore implements IDBObjectStore { ); } + if (this._deleted) { + throw new InvalidStateError( + "tried to call 'put' on a deleted object store", + ); + } + if (!this._transaction._active) { throw new TransactionInactiveError(); } @@ -1655,14 +1725,21 @@ export class BridgeIDBObjectStore implements IDBObjectStore { throw new ReadOnlyError(); } - const { keyPath, autoIncrement } = this._schema.objectStores[this._name]; + const { keyPath, autoIncrement } = this._objectStoreMeta; if (key !== null && key !== undefined) { valueToKey(key); } // We only call this to synchronously verify the request. - makeStoreKeyValue(value, key, 1, autoIncrement, keyPath); + // FIXME: The backend should do that! 
+ makeStoreKeyValue({ + value: value, + key: key, + currentKeyGenerator: 1, + autoIncrement, + keyPath, + }); const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); @@ -1684,11 +1761,6 @@ export class BridgeIDBObjectStore implements IDBObjectStore { if (arguments.length === 0) { throw new TypeError(); } - if (this._deleted) { - throw new InvalidStateError( - "tried to call 'put' on a deleted object store", - ); - } return this._store(value, key, true); } @@ -1696,9 +1768,6 @@ export class BridgeIDBObjectStore implements IDBObjectStore { if (arguments.length === 0) { throw new TypeError(); } - if (!this._schema.objectStores[this._name]) { - throw new InvalidStateError("object store does not exist"); - } return this._store(value, key, false); } @@ -1767,10 +1836,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore { } } - const recordRequest: RecordGetRequest = { + const recordRequest: ObjectStoreGetQuery = { objectStoreName: this._name, - indexName: undefined, - lastIndexPosition: undefined, lastObjectStorePosition: undefined, direction: "next", limit: 1, @@ -1783,7 +1850,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore { console.log("running get operation:", recordRequest); } const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, recordRequest); + const result = await this._backend.getObjectStoreRecords( + btx, + recordRequest, + ); if (BridgeIDBFactory.enableTracing) { console.log("get operation result count:", result.count); @@ -1833,10 +1903,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore { let keyRange: BridgeIDBKeyRange | null = simplifyRange(query); - const recordRequest: RecordGetRequest = { + const recordRequest: ObjectStoreGetQuery = { objectStoreName: this._name, - indexName: undefined, - lastIndexPosition: undefined, lastObjectStorePosition: undefined, direction: "next", limit: count, @@ -1849,7 +1917,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore { console.log("running getAll operation:", recordRequest); } const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, recordRequest); + const result = await this._backend.getObjectStoreRecords( + btx, + recordRequest, + ); if (BridgeIDBFactory.enableTracing) { console.log("get operation result count:", result.count); @@ -1887,10 +1958,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore { let keyRange: BridgeIDBKeyRange | null = simplifyRange(query); - const recordRequest: RecordGetRequest = { + const recordRequest: ObjectStoreGetQuery = { objectStoreName: this._name, - indexName: undefined, - lastIndexPosition: undefined, lastObjectStorePosition: undefined, direction: "next", limit: 1, @@ -1903,7 +1972,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore { console.log("running getKey operation:", recordRequest); } const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, recordRequest); + const result = await this._backend.getObjectStoreRecords( + btx, + recordRequest, + ); if (BridgeIDBFactory.enableTracing) { console.log("getKey operation result count:", result.count); @@ -1965,10 +2037,8 @@ export class BridgeIDBObjectStore implements IDBObjectStore { } } - const recordRequest: RecordGetRequest = { + const recordRequest: ObjectStoreGetQuery = { objectStoreName: this._name, - indexName: undefined, - lastIndexPosition: undefined, 
lastObjectStorePosition: undefined, direction: "next", limit: count, @@ -1978,7 +2048,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore { const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, recordRequest); + const result = await this._backend.getObjectStoreRecords( + btx, + recordRequest, + ); const primaryKeys = result.primaryKeys; if (!primaryKeys) { @@ -2121,7 +2194,7 @@ export class BridgeIDBObjectStore implements IDBObjectStore { throw new InvalidStateError(); } - if (this._indexNames.indexOf(indexName) >= 0) { + if (this._objectStoreMeta.indexSet.indexOf(indexName) >= 0) { throw new ConstraintError(); } @@ -2140,6 +2213,9 @@ export class BridgeIDBObjectStore implements IDBObjectStore { unique, ); + this._objectStoreMeta.indexSet.push(indexName); + this._objectStoreMeta.indexSet.sort(); + const idx = this.index(indexName); idx._justCreated = true; return idx; @@ -2154,13 +2230,20 @@ export class BridgeIDBObjectStore implements IDBObjectStore { if (this._transaction._finished) { throw new InvalidStateError(); } - - const index = this._indexesCache.get(name); + const index = this._indexHandlesCache.get(name); if (index !== undefined) { return index; } - const newIndex = new BridgeIDBIndex(this, name); - this._indexesCache.set(name, newIndex); + const indexMeta = this._backend.getIndexMeta( + this._backendConnection, + this._name, + name, + ); + if (!indexMeta) { + throw new NotFoundError(); + } + const newIndex = new BridgeIDBIndex(this, name, indexMeta); + this._indexHandlesCache.set(name, newIndex); this._transaction._usedIndexes.push(newIndex); return newIndex; } @@ -2180,12 +2263,15 @@ export class BridgeIDBObjectStore implements IDBObjectStore { const { btx } = this._confirmStartedBackendTransaction(); - const index = this._indexesCache.get(indexName); + const index = this._indexHandlesCache.get(indexName); if (index !== undefined) { index._deleted = true; - this._indexesCache.delete(indexName); + this._indexHandlesCache.delete(indexName); } + const indexIdx = this._objectStoreMeta.indexSet.indexOf(indexName); + this._objectStoreMeta.indexSet.splice(indexIdx, 1); + this._backend.deleteIndex(btx, this._name, indexName); } @@ -2198,11 +2284,9 @@ export class BridgeIDBObjectStore implements IDBObjectStore { key = BridgeIDBKeyRange.only(valueToKey(key)); } - const recordGetRequest: RecordGetRequest = { + const recordGetRequest: ObjectStoreGetQuery = { direction: "next", - indexName: undefined, - lastIndexPosition: undefined, - limit: -1, + limit: 0, objectStoreName: this._name, lastObjectStorePosition: undefined, range: key, @@ -2211,7 +2295,10 @@ export class BridgeIDBObjectStore implements IDBObjectStore { const operation = async () => { const { btx } = this._confirmStartedBackendTransaction(); - const result = await this._backend.getRecords(btx, recordGetRequest); + const result = await this._backend.getObjectStoreRecords( + btx, + recordGetRequest, + ); return result.count; }; @@ -2223,7 +2310,6 @@ export class BridgeIDBObjectStore implements IDBObjectStore { } } -/** @public */ export class BridgeIDBRequest extends FakeEventTarget implements IDBRequest { _result: any = null; _error: Error | null | undefined = null; @@ -2294,7 +2380,6 @@ export class BridgeIDBRequest extends FakeEventTarget implements IDBRequest { } } -/** @public */ export class BridgeIDBOpenDBRequest extends BridgeIDBRequest implements IDBOpenDBRequest @@ -2343,7 +2428,6 @@ function waitMacroQueue(): Promise 
{ } // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#transaction -/** @public */ export class BridgeIDBTransaction extends FakeEventTarget implements IDBTransaction @@ -2390,13 +2474,9 @@ export class BridgeIDBTransaction get objectStoreNames(): DOMStringList { if (!this._cachedObjectStoreNames) { - if (this._openRequest) { - this._cachedObjectStoreNames = this._db.objectStoreNames; - } else { - this._cachedObjectStoreNames = fakeDOMStringList( - Array.from(this._scope).sort(), - ); - } + this._cachedObjectStoreNames = fakeDOMStringList( + Array.from(this._scope).sort(), + ); } return this._cachedObjectStoreNames; } @@ -2496,41 +2576,34 @@ export class BridgeIDBTransaction } } + // All steps before happened synchronously. Now + // we asynchronously roll back the backend transaction, + // if necessary/possible. + + const maybeBtx = this._backendTransaction; + if (maybeBtx) { + this._backend.rollback(maybeBtx); + } + // "Any object stores and indexes which were created during the // transaction are now considered deleted for the purposes of other // algorithms." if (this._db._upgradeTransaction) { for (const os of this._usedObjectStores) { - if (os._justCreated) { - os._deleted = true; - } + os._abort(); } for (const ind of this._usedIndexes) { - if (ind._justCreated) { - ind._deleted = true; - } + ind._abort(); } } + this._db._version = this._db._initialVersion; + // ("abort a transaction", step 5.1) if (this._openRequest) { this._db._upgradeTransaction = null; } - // All steps before happened synchronously. Now - // we asynchronously roll back the backend transaction, - // if necessary/possible. - - const maybeBtx = this._backendTransaction; - if (maybeBtx) { - this._db._schema = this._backend.getInitialTransactionSchema(maybeBtx); - // Only roll back if we actually executed the scheduled operations. 
- await this._backend.rollback(maybeBtx); - this._backendTransaction = undefined; - } else { - this._db._schema = this._backend.getSchema(this._db._backendConnection); - } - queueTask(() => { const event = new FakeEvent("abort", { bubbles: true, @@ -2560,22 +2633,29 @@ export class BridgeIDBTransaction throw new TransactionInactiveError(); } - if (!this._db._schema.objectStores[name]) { + if (!this._scope.has(name)) { throw new NotFoundError(); } - if (!this._db._upgradeTransaction) { - if (!this._scope.has(name)) { - throw new NotFoundError(); - } - } - const objectStore = this._objectStoresCache.get(name); if (objectStore !== undefined) { return objectStore; } - const newObjectStore = new BridgeIDBObjectStore(this, name); + const objectStoreMeta = this._backend.getObjectStoreMeta( + this._db._backendConnection, + name, + ); + + if (!objectStoreMeta) { + throw new NotFoundError(); + } + + const newObjectStore = new BridgeIDBObjectStore( + this, + name, + objectStoreMeta, + ); this._objectStoresCache.set(name, newObjectStore); this._usedObjectStores.push(newObjectStore); return newObjectStore; diff --git a/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts b/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts index bbbcf9b94..14d4f7d6e 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/abort-in-initial-upgradeneeded.test.ts @@ -1,5 +1,7 @@ import test from "ava"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT test abort-in-initial-upgradeneeded.htm", async (t) => { await new Promise((resolve, reject) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts b/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts index 723a0abb5..1a730df0b 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/close-in-upgradeneeded.test.ts @@ -1,5 +1,7 @@ import test from "ava"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // When db.close is called in upgradeneeded, the db is cleaned up on refresh test("WPT test close-in-upgradeneeded.htm", (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts b/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts index db2cdbca8..795d515ed 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/cursor-overloads.test.ts @@ -1,7 +1,9 @@ import test from "ava"; import { BridgeIDBKeyRange } from "../bridge-idb.js"; import { IDBRequest } from "../idbtypes.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); const IDBKeyRange = BridgeIDBKeyRange; diff --git a/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts b/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts index acc2a7578..e57b48f76 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/event-dispatch-active-flag.test.ts @@ -2,10 +2,13 @@ import test from "ava"; import 
{ BridgeIDBRequest } from "../bridge-idb.js"; import { indexeddb_test, + initTestIndexedDB, is_transaction_active, keep_alive, } from "./wptsupport.js"; +test.before("test DB initialization", initTestIndexedDB); + test("WPT test abort-in-initial-upgradeneeded.htm (subtest 1)", async (t) => { // Transactions are active during success handlers await indexeddb_test( diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts index 108e7c91c..1bf5ca697 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-advance-index.test.ts @@ -1,6 +1,8 @@ import test from "ava"; import { BridgeIDBCursor,BridgeIDBRequest } from "../bridge-idb.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT test idbcursor_advance_index.htm", async (t) => { await new Promise((resolve, reject) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts index f8b3a0f01..3cea3e86d 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-index.test.ts @@ -1,6 +1,9 @@ import test from "ava"; import { BridgeIDBCursor, BridgeIDBCursorWithValue } from "../bridge-idb.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; +import { IDBDatabase } from "../idbtypes.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT test idbcursor_continue_index.htm", (t) => { return new Promise((resolve, reject) => { @@ -209,7 +212,7 @@ test("WPT idbcursor-continue-index4.htm", (t) => { // IDBCursor.continue() - index - iterate using 'prevunique' test("WPT idbcursor-continue-index5.htm", (t) => { return new Promise((resolve, reject) => { - var db: any; + var db: IDBDatabase; const records = [ { pKey: "primaryKey_0", iKey: "indexKey_0" }, { pKey: "primaryKey_1", iKey: "indexKey_1" }, diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts index e3169195f..d8b6f2b31 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-continue-objectstore.test.ts @@ -1,7 +1,9 @@ import test from "ava"; import { BridgeIDBCursor } from "../bridge-idb.js"; import { IDBDatabase } from "../idbtypes.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBCursor.continue() - object store - iterate to the next record test("WPT test idbcursor_continue_objectstore.htm", (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts index f771d19a2..e159129da 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-exception-order.test.ts @@ -1,5 +1,7 @@ import test from "ava"; -import { indexeddb_test } from "./wptsupport.js"; +import { indexeddb_test, 
initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT idbcursor-delete-exception-order.htm", async (t) => { // 'IDBCursor.delete exception order: TransactionInactiveError vs. ReadOnlyError' diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts index 0232cf247..d34c9c3f9 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-index.test.ts @@ -1,7 +1,9 @@ import test from "ava"; import { BridgeIDBCursor } from "../bridge-idb.js"; import { IDBCursor } from "../idbtypes.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBCursor.delete() - index - remove a record from the object store test("WPT idbcursor-delete-index.htm", (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts index 9410ca79e..2b9993b19 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-delete-objectstore.test.ts @@ -1,6 +1,8 @@ import test from "ava"; import { BridgeIDBCursor } from "../bridge-idb.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBCursor.delete() - object store - remove a record from the object store test("WPT idbcursor-delete-objectstore.htm", (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts index 54745802e..b13bd1fc3 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-reused.test.ts @@ -1,5 +1,7 @@ import test from "ava"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT idbcursor-reused.htm", async (t) => { await new Promise((resolve, reject) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts index 81a7cd753..8a878b35a 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbcursor-update-index.test.ts @@ -3,10 +3,13 @@ import { BridgeIDBCursor, BridgeIDBKeyRange } from "../bridge-idb.js"; import { createDatabase, createdb, + initTestIndexedDB, promiseForRequest, promiseForTransaction, } from "./wptsupport.js"; +test.before("test DB initialization", initTestIndexedDB); + // IDBCursor.update() - index - modify a record in the object store test("WPT test idbcursor_update_index.htm", (t) => { return new Promise((resolve, reject) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts index a6cb97612..450bec7be 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-cmp.test.ts @@ -1,8 +1,10 @@ import test from "ava"; -import { idbFactory } from 
"./wptsupport.js"; +import { initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT idbfactory-cmp*.html", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); var greater = indexedDB.cmp(2, 1); var equal = indexedDB.cmp(2, 2); var less = indexedDB.cmp(1, 2); diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts index 02618f171..b8046fc1b 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbfactory-open.test.ts @@ -1,7 +1,10 @@ import test from "ava"; import { BridgeIDBVersionChangeEvent } from "../bridge-idb.js"; import FakeEvent from "../util/FakeEvent.js"; -import { createdb, format_value, idbFactory } from "./wptsupport.js"; +import { createdb, format_value, initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js"; +import { IDBDatabase } from "../idbtypes.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBFactory.open() - request has no source test("WPT idbfactory-open.htm", async (t) => { @@ -36,7 +39,7 @@ test("WPT idbfactory-open2.htm", async (t) => { // IDBFactory.open() - no version opens current database test("WPT idbfactory-open3.htm", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); await new Promise((resolve, reject) => { var open_rq = createdb(t, undefined, 13); var did_upgrade = false; @@ -61,7 +64,6 @@ test("WPT idbfactory-open3.htm", async (t) => { // IDBFactory.open() - new database has default version test("WPT idbfactory-open4.htm", async (t) => { - const indexedDB = idbFactory; await new Promise((resolve, reject) => { var open_rq = createdb(t, t.title + "-database_name"); @@ -78,7 +80,6 @@ test("WPT idbfactory-open4.htm", async (t) => { // IDBFactory.open() - new database is empty test("WPT idbfactory-open5.htm", async (t) => { - const indexedDB = idbFactory; await new Promise((resolve, reject) => { var open_rq = createdb(t, t.title + "-database_name"); @@ -97,7 +98,7 @@ test("WPT idbfactory-open5.htm", async (t) => { // IDBFactory.open() - open database with a lower version than current test("WPT idbfactory-open6.htm", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); await new Promise((resolve, reject) => { var open_rq = createdb(t, undefined, 13); var open_rq2: any; @@ -131,7 +132,7 @@ test("WPT idbfactory-open6.htm", async (t) => { // IDBFactory.open() - open database with a higher version than current test("WPT idbfactory-open7.htm", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); await new Promise((resolve, reject) => { var open_rq = createdb(t, undefined, 13); var did_upgrade = false; @@ -169,7 +170,7 @@ test("WPT idbfactory-open7.htm", async (t) => { // IDBFactory.open() - error in version change transaction aborts open test("WPT idbfactory-open8.htm", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); await new Promise((resolve, reject) => { var open_rq = createdb(t, undefined, 13); var did_upgrade = false; @@ -193,7 +194,7 @@ test("WPT idbfactory-open8.htm", async (t) => { // IDBFactory.open() - errors in version argument test("WPT idbfactory-open9.htm", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); function should_throw(val: any, name?: string) { if (!name) { name = typeof val == 
"object" && val ? "object" : format_value(val); @@ -281,9 +282,9 @@ test("WPT idbfactory-open9.htm", async (t) => { // IDBFactory.open() - error in version change transaction aborts open test("WPT idbfactory-open10.htm", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); await new Promise((resolve, reject) => { - var db: any, db2: any; + var db: IDBDatabase, db2: IDBDatabase; var open_rq = createdb(t, undefined, 9); open_rq.onupgradeneeded = function (e: any) { @@ -350,7 +351,7 @@ test("WPT idbfactory-open10.htm", async (t) => { var open_rq3 = indexedDB.open(db.name); open_rq3.onsuccess = function (e: any) { - var db3 = e.target.result; + var db3: IDBDatabase = e.target.result; t.true( db3.objectStoreNames.contains("store"), @@ -407,7 +408,7 @@ test("WPT idbfactory-open10.htm", async (t) => { // IDBFactory.open() - second open's transaction is available to get objectStores test("WPT idbfactory-open11.htm", async (t) => { - const indexedDB = idbFactory; + const indexedDB = useTestIndexedDb(); await new Promise((resolve, reject) => { var db: any; var count_done = 0; @@ -472,8 +473,6 @@ test("WPT idbfactory-open11.htm", async (t) => { // IDBFactory.open() - upgradeneeded gets VersionChangeEvent test("WPT idbfactory-open12.htm", async (t) => { - const indexedDB = idbFactory; - var db: any; var open_rq = createdb(t, undefined, 9); diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts index d3b6e844e..ad8a57305 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbindex-get.test.ts @@ -1,7 +1,9 @@ import test from "ava"; import { BridgeIDBKeyRange } from "../bridge-idb.js"; import { IDBDatabase } from "../idbtypes.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBIndex.get() - returns the record test("WPT idbindex_get.htm", async (t) => { @@ -93,7 +95,7 @@ test("WPT idbindex_get3.htm", async (t) => { // IDBIndex.get() - returns the record with the first key in the range test("WPT idbindex_get4.htm", async (t) => { await new Promise((resolve, reject) => { - var db: any; + var db: IDBDatabase; var open_rq = createdb(t); diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts index 765bcf06a..5d61e68e5 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbindex-openCursor.test.ts @@ -1,5 +1,7 @@ import test from "ava"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBIndex.openCursor() - throw InvalidStateError when the index is deleted test("WPT test idbindex-openCursor.htm", (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts index 901eda89c..60bf0cfb2 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add-put-exception-order.test.ts @@ -1,5 +1,7 @@ import test, { ExecutionContext } from "ava"; -import { indexeddb_test } from "./wptsupport.js"; +import 
{ indexeddb_test, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); async function t1(t: ExecutionContext, method: string): Promise { await indexeddb_test( @@ -55,8 +57,6 @@ async function t2(t: ExecutionContext, method: string): Promise { done(); }, 0); - - console.log(`queued task for ${method}`); }, "t2", ); diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts index e8bc17471..4941c43d6 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-add.test.ts @@ -1,7 +1,9 @@ import test from "ava"; import { BridgeIDBRequest } from "../bridge-idb.js"; import { IDBDatabase } from "../idbtypes.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBObjectStore.add() - add with an inline key test("WPT idbobjectstore_add.htm", async (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts index 79064d19d..922c2bcf4 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-get.test.ts @@ -1,6 +1,8 @@ import test from "ava"; import { BridgeIDBKeyRange } from "../bridge-idb.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBObjectStore.get() - key is a number test("WPT idbobjectstore_get.htm", (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts index 152e3a9c1..f051c57b6 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-put.test.ts @@ -1,6 +1,8 @@ import test from "ava"; import { BridgeIDBRequest } from "../bridge-idb.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBObjectStore.put() - put with an inline key test("WPT idbobjectstore_put.htm", (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts index a8aab828a..6f04552fa 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbobjectstore-rename-store.test.ts @@ -6,9 +6,12 @@ import { createBooksStore, createDatabase, createNotBooksStore, + initTestIndexedDB, migrateDatabase, } from "./wptsupport.js"; +test.before("test DB initialization", initTestIndexedDB); + // IndexedDB: object store renaming support // IndexedDB object store rename in new transaction test("WPT idbobjectstore-rename-store.html (subtest 1)", async (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts b/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts index a501ff2c9..f728cd487 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/idbtransaction-oncomplete.test.ts @@ -1,5 
+1,7 @@ import test from "ava"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // IDBTransaction - complete event test("WPT idbtransaction-oncomplete.htm", async (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts b/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts index 7ef1301f7..f15f93873 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/keypath.test.ts @@ -1,5 +1,7 @@ import test from "ava"; -import { assert_key_equals, createdb } from "./wptsupport.js"; +import { assert_key_equals, createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT test keypath.htm", async (t) => { function keypath( @@ -9,8 +11,6 @@ test("WPT test keypath.htm", async (t) => { desc?: string, ) { return new Promise((resolve, reject) => { - console.log("key path", keypath); - console.log("checking", desc); let db: any; const store_name = "store-" + Date.now() + Math.random(); diff --git a/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts b/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts index 526c06784..14c8f3be5 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/request-bubble-and-capture.test.ts @@ -1,6 +1,8 @@ import test from "ava"; import { EventTarget } from "../idbtypes.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // Bubbling and capturing of request events test("WPT request_bubble-and-capture.htm", async (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts b/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts index 9d76e79f2..971330e3d 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/transaction-requestqueue.test.ts @@ -1,5 +1,7 @@ import test from "ava"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); // Transactions have a request queue test("transaction-requestqueue.htm", async (t) => { diff --git a/packages/idb-bridge/src/idb-wpt-ported/value.test.ts b/packages/idb-bridge/src/idb-wpt-ported/value.test.ts index a80ec2b5a..95712e152 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/value.test.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/value.test.ts @@ -1,6 +1,8 @@ import test from "ava"; import { IDBVersionChangeEvent } from "../idbtypes.js"; -import { createdb } from "./wptsupport.js"; +import { createdb, initTestIndexedDB } from "./wptsupport.js"; + +test.before("test DB initialization", initTestIndexedDB); test("WPT test value.htm, array", (t) => { return new Promise((resolve, reject) => { @@ -12,7 +14,6 @@ test("WPT test value.htm, array", (t) => { createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) { (e.target as any).result.createObjectStore("store").add(value, 1); (e.target as any).onsuccess = (e: any) => { - console.log("in first onsuccess"); e.target.result .transaction("store") .objectStore("store") @@ -35,13 +36,10 @@ test("WPT test value.htm, date", (t) => { 
createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) { (e.target as any).result.createObjectStore("store").add(value, 1); (e.target as any).onsuccess = (e: any) => { - console.log("in first onsuccess"); e.target.result .transaction("store") .objectStore("store") .get(1).onsuccess = (e: any) => { - console.log("target", e.target); - console.log("result", e.target.result); t.assert(e.target.result instanceof _instanceof, "instanceof"); resolve(); }; diff --git a/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts b/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts index 7f68a53e8..c648bf53f 100644 --- a/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts +++ b/packages/idb-bridge/src/idb-wpt-ported/wptsupport.ts @@ -1,5 +1,5 @@ import { ExecutionContext } from "ava"; -import { BridgeIDBFactory, BridgeIDBRequest } from "../bridge-idb.js"; +import { BridgeIDBRequest } from "../bridge-idb.js"; import { IDBDatabase, IDBIndex, @@ -8,17 +8,10 @@ import { IDBRequest, IDBTransaction, } from "../idbtypes.js"; -import { MemoryBackend } from "../MemoryBackend.js"; +import { initTestIndexedDB , useTestIndexedDb } from "../testingdb.js"; import { compareKeys } from "../util/cmp.js"; -BridgeIDBFactory.enableTracing = true; -const backend = new MemoryBackend(); -backend.enableTracing = true; -export const idbFactory = new BridgeIDBFactory(backend); - -const self = { - indexedDB: idbFactory, -}; +export { initTestIndexedDB, useTestIndexedDb } from "../testingdb.js" export function createdb( t: ExecutionContext, @@ -27,8 +20,8 @@ export function createdb( ): IDBOpenDBRequest { var rq_open: IDBOpenDBRequest; dbname = dbname ? dbname : "testdb-" + new Date().getTime() + Math.random(); - if (version) rq_open = self.indexedDB.open(dbname, version); - else rq_open = self.indexedDB.open(dbname); + if (version) rq_open = useTestIndexedDb().open(dbname, version); + else rq_open = useTestIndexedDb().open(dbname); return rq_open; } @@ -111,7 +104,7 @@ export async function migrateNamedDatabase( migrationCallback: MigrationCallback, ): Promise { return new Promise((resolve, reject) => { - const request = self.indexedDB.open(databaseName, newVersion); + const request = useTestIndexedDb().open(databaseName, newVersion); request.onupgradeneeded = (event: any) => { const database = event.target.result; const transaction = event.target.transaction; @@ -175,7 +168,7 @@ export async function createDatabase( setupCallback: MigrationCallback, ): Promise { const databaseName = makeDatabaseName(t.title); - const request = self.indexedDB.deleteDatabase(databaseName); + const request = useTestIndexedDb().deleteDatabase(databaseName); return migrateNamedDatabase(t, databaseName, 1, setupCallback); } @@ -463,9 +456,9 @@ export function indexeddb_test( options = Object.assign({ upgrade_will_abort: false }, options); const dbname = "testdb-" + new Date().getTime() + Math.random() + (dbsuffix ?? 
""); - var del = self.indexedDB.deleteDatabase(dbname); + var del = useTestIndexedDb().deleteDatabase(dbname); del.onerror = () => t.fail("deleteDatabase should succeed"); - var open = self.indexedDB.open(dbname, 1); + var open = useTestIndexedDb().open(dbname, 1); open.onupgradeneeded = function () { var db = open.result; t.teardown(function () { @@ -474,7 +467,7 @@ export function indexeddb_test( e.preventDefault(); }; db.close(); - self.indexedDB.deleteDatabase(db.name); + useTestIndexedDb().deleteDatabase(db.name); }); var tx = open.transaction!; upgrade_func(resolve, db, tx, open); diff --git a/packages/idb-bridge/src/idbpromutil.ts b/packages/idb-bridge/src/idbpromutil.ts new file mode 100644 index 000000000..e711db027 --- /dev/null +++ b/packages/idb-bridge/src/idbpromutil.ts @@ -0,0 +1,26 @@ +import { BridgeIDBTransaction } from "./bridge-idb.js"; +import { IDBRequest } from "./idbtypes.js"; + +export function promiseFromRequest(request: IDBRequest): Promise { + return new Promise((resolve, reject) => { + request.onsuccess = () => { + resolve(request.result); + }; + request.onerror = () => { + reject(request.error); + }; + }); +} + +export function promiseFromTransaction( + transaction: BridgeIDBTransaction, +): Promise { + return new Promise((resolve, reject) => { + transaction.oncomplete = () => { + resolve(); + }; + transaction.onerror = () => { + reject(); + }; + }); +} \ No newline at end of file diff --git a/packages/idb-bridge/src/idbtypes.ts b/packages/idb-bridge/src/idbtypes.ts index a7878c38f..9ee93e050 100644 --- a/packages/idb-bridge/src/idbtypes.ts +++ b/packages/idb-bridge/src/idbtypes.ts @@ -19,48 +19,27 @@ and limitations under the License. * Instead of ambient types, we export type declarations. */ -/** - * @public - */ export type IDBKeyPath = string; -/** - * @public - */ export interface EventListener { (evt: Event): void; } -/** - * @public - */ export interface EventListenerObject { handleEvent(evt: Event): void; } -/** - * @public - */ export interface EventListenerOptions { capture?: boolean; } -/** - * @public - */ export interface AddEventListenerOptions extends EventListenerOptions { once?: boolean; passive?: boolean; } -/** - * @public - */ export type IDBTransactionMode = "readonly" | "readwrite" | "versionchange"; -/** - * @public - */ export type EventListenerOrEventListenerObject = | EventListener | EventListenerObject; @@ -68,8 +47,6 @@ export type EventListenerOrEventListenerObject = /** * EventTarget is a DOM interface implemented by objects that can receive * events and may have listeners for them. 
- * - * @public */ export interface EventTarget { /** diff --git a/packages/idb-bridge/src/index.ts b/packages/idb-bridge/src/index.ts index fc99b2ccd..47ff80119 100644 --- a/packages/idb-bridge/src/index.ts +++ b/packages/idb-bridge/src/index.ts @@ -2,14 +2,10 @@ import { Backend, DatabaseConnection, DatabaseTransaction, - IndexProperties, - ObjectStoreProperties, - RecordGetRequest, RecordGetResponse, RecordStoreRequest, RecordStoreResponse, ResultLevel, - Schema, StoreLevel, } from "./backend-interface.js"; import { @@ -36,6 +32,9 @@ import { } from "./MemoryBackend.js"; import { Listener } from "./util/FakeEventTarget.js"; +export * from "./SqliteBackend.js"; +export * from "./sqlite3-interface.js"; + export * from "./idbtypes.js"; export { MemoryBackend } from "./MemoryBackend.js"; export type { AccessStats } from "./MemoryBackend.js"; @@ -55,21 +54,17 @@ export { }; export type { DatabaseTransaction, - RecordGetRequest, RecordGetResponse, - Schema, Backend, DatabaseList, RecordStoreRequest, RecordStoreResponse, DatabaseConnection, - ObjectStoreProperties, RequestObj, DatabaseDump, ObjectStoreDump, IndexRecord, ObjectStoreRecord, - IndexProperties, MemoryBackendDump, Event, Listener, diff --git a/packages/idb-bridge/src/node-sqlite3-impl.ts b/packages/idb-bridge/src/node-sqlite3-impl.ts new file mode 100644 index 000000000..fa38d298f --- /dev/null +++ b/packages/idb-bridge/src/node-sqlite3-impl.ts @@ -0,0 +1,84 @@ +/* + This file is part of GNU Taler + (C) 2023 Taler Systems S.A. + + GNU Taler is free software; you can redistribute it and/or modify it under the + terms of the GNU General Public License as published by the Free Software + Foundation; either version 3, or (at your option) any later version. + + GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY + WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR + A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with + GNU Taler; see the file COPYING. If not, see + */ + +// @ts-ignore: optional dependency +import type Database from "better-sqlite3"; +import { + ResultRow, + Sqlite3Interface, + Sqlite3Statement, +} from "./sqlite3-interface.js"; + +export async function createNodeSqlite3Impl(): Promise { + // @ts-ignore: optional dependency + const bsq = (await import("better-sqlite3")).default; + + return { + open(filename: string) { + const internalDbHandle = bsq(filename); + return { + internalDbHandle, + close() { + internalDbHandle.close(); + }, + prepare(stmtStr): Sqlite3Statement { + const stmtHandle = internalDbHandle.prepare(stmtStr); + return { + internalStatement: stmtHandle, + getAll(params): ResultRow[] { + let res: ResultRow[]; + if (params === undefined) { + res = stmtHandle.all() as ResultRow[]; + } else { + res = stmtHandle.all(params) as ResultRow[]; + } + return res; + }, + getFirst(params): ResultRow | undefined { + let res: ResultRow | undefined; + if (params === undefined) { + res = stmtHandle.get() as ResultRow | undefined; + } else { + res = stmtHandle.get(params) as ResultRow | undefined; + } + return res; + }, + run(params) { + const myParams = []; + if (params !== undefined) { + myParams.push(params); + } + // The better-sqlite3 library doesn't like it we pass + // undefined directly. 
+ let res: Database.RunResult; + if (params !== undefined) { + res = stmtHandle.run(params); + } else { + res = stmtHandle.run(); + } + return { + lastInsertRowid: res.lastInsertRowid, + }; + }, + }; + }, + exec(sqlStr): void { + internalDbHandle.exec(sqlStr); + }, + }; + }, + }; +} diff --git a/packages/idb-bridge/src/sqlite3-interface.ts b/packages/idb-bridge/src/sqlite3-interface.ts new file mode 100644 index 000000000..8668ef844 --- /dev/null +++ b/packages/idb-bridge/src/sqlite3-interface.ts @@ -0,0 +1,34 @@ +export type Sqlite3Database = { + internalDbHandle: any; + exec(sqlStr: string): void; + prepare(stmtStr: string): Sqlite3Statement; + close(): void; +}; +export type Sqlite3Statement = { + internalStatement: any; + + run(params?: BindParams): RunResult; + getAll(params?: BindParams): ResultRow[]; + getFirst(params?: BindParams): ResultRow | undefined; +}; + +export interface RunResult { + lastInsertRowid: number | bigint; +} + +export type Sqlite3Value = string | Uint8Array | number | null | bigint; + +export type BindParams = Record; +export type ResultRow = Record; + +/** + * Common interface that multiple sqlite3 bindings + * (such as better-sqlite3 or qtart's sqlite3 bindings) + * can adapt to. + * + * This does not expose full sqlite3 functionality, but just enough + * to be used by our IndexedDB sqlite3 backend. + */ +export interface Sqlite3Interface { + open(filename: string): Sqlite3Database; +} diff --git a/packages/idb-bridge/src/testingdb.ts b/packages/idb-bridge/src/testingdb.ts new file mode 100644 index 000000000..c6abffa0f --- /dev/null +++ b/packages/idb-bridge/src/testingdb.ts @@ -0,0 +1,43 @@ +/* + Copyright 2023 Taler Systems S.A. + + GNU Taler is free software; you can redistribute it and/or modify it under the + terms of the GNU General Public License as published by the Free Software + Foundation; either version 3, or (at your option) any later version. + + GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY + WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR + A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with + GNU Taler; see the file COPYING. If not, see + */ + +import { createSqliteBackend } from "./SqliteBackend.js"; +import { BridgeIDBFactory } from "./bridge-idb.js"; +import { IDBFactory } from "./idbtypes.js"; +import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js"; + +let idbFactory: IDBFactory | undefined = undefined; + +export async function initTestIndexedDB(): Promise { + // const backend = new MemoryBackend(); + // backend.enableTracing = true; + + const sqlite3Impl = await createNodeSqlite3Impl(); + + const backend = await createSqliteBackend(sqlite3Impl, { + filename: ":memory:", + }); + + idbFactory = new BridgeIDBFactory(backend); + backend.enableTracing = true; + BridgeIDBFactory.enableTracing = false; +} + +export function useTestIndexedDb(): IDBFactory { + if (!idbFactory) { + throw Error("indexeddb factory not initialized"); + } + return idbFactory; +} diff --git a/packages/idb-bridge/src/util/FakeDomEvent.ts b/packages/idb-bridge/src/util/FakeDomEvent.ts new file mode 100644 index 000000000..b3ff298ec --- /dev/null +++ b/packages/idb-bridge/src/util/FakeDomEvent.ts @@ -0,0 +1,103 @@ +/* + Copyright 2017 Jeremy Scheff + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + or implied. See the License for the specific language governing + permissions and limitations under the License. +*/ + +import FakeEventTarget from "./FakeEventTarget.js"; +import { Event, EventTarget } from "../idbtypes.js"; + +/** @public */ +export type EventType = + | "abort" + | "blocked" + | "complete" + | "error" + | "success" + | "upgradeneeded" + | "versionchange"; + +export class FakeDomEvent implements Event { + public eventPath: FakeEventTarget[] = []; + public type: EventType; + + public readonly NONE = 0; + public readonly CAPTURING_PHASE = 1; + public readonly AT_TARGET = 2; + public readonly BUBBLING_PHASE = 3; + + // Flags + public propagationStopped = false; + public immediatePropagationStopped = false; + public canceled = false; + public initialized = true; + public dispatched = false; + + public target: FakeEventTarget | null = null; + public currentTarget: FakeEventTarget | null = null; + + public eventPhase: 0 | 1 | 2 | 3 = 0; + + public defaultPrevented = false; + + public isTrusted = false; + public timeStamp = Date.now(); + + public bubbles: boolean; + public cancelable: boolean; + + constructor( + type: EventType, + eventInitDict: { bubbles?: boolean; cancelable?: boolean } = {}, + ) { + this.type = type; + + this.bubbles = + eventInitDict.bubbles !== undefined ? eventInitDict.bubbles : false; + this.cancelable = + eventInitDict.cancelable !== undefined ? eventInitDict.cancelable : false; + } + cancelBubble: boolean = false; + composed: boolean = false; + returnValue: boolean = false; + get srcElement(): EventTarget | null { + return this.target; + } + composedPath(): EventTarget[] { + throw new Error("Method not implemented."); + } + initEvent( + type: string, + bubbles?: boolean | undefined, + cancelable?: boolean | undefined, + ): void { + throw new Error("Method not implemented."); + } + + public preventDefault() { + if (this.cancelable) { + this.canceled = true; + } + } + + public stopPropagation() { + this.propagationStopped = true; + } + + public stopImmediatePropagation() { + this.propagationStopped = true; + this.immediatePropagationStopped = true; + } +} + +export default FakeDomEvent; diff --git a/packages/idb-bridge/src/util/FakeEventTarget.ts b/packages/idb-bridge/src/util/FakeEventTarget.ts index 79f57cce3..839906a34 100644 --- a/packages/idb-bridge/src/util/FakeEventTarget.ts +++ b/packages/idb-bridge/src/util/FakeEventTarget.ts @@ -180,7 +180,7 @@ abstract class FakeEventTarget implements EventTarget { fe.eventPath.reverse(); fe.eventPhase = event.BUBBLING_PHASE; if (fe.eventPath.length === 0 && event.type === "error") { - console.error("Unhandled error event: ", event.target); + console.error("Unhandled error event on target: ", event.target); } for (const obj of event.eventPath) { if (!event.propagationStopped) { diff --git a/packages/idb-bridge/src/util/extractKey.ts b/packages/idb-bridge/src/util/extractKey.ts index 6a3d468ef..2a4ec45b9 100644 --- a/packages/idb-bridge/src/util/extractKey.ts +++ b/packages/idb-bridge/src/util/extractKey.ts @@ -19,7 +19,11 @@ import { IDBKeyPath, IDBValidKey } from "../idbtypes.js"; import { valueToKey } from "./valueToKey.js"; // 
http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#dfn-steps-for-extracting-a-key-from-a-value-using-a-key-path +/** + * Algorithm to "extract a key from a value using a key path". + */ export const extractKey = (keyPath: IDBKeyPath | IDBKeyPath[], value: any) => { + //console.log(`extracting key ${JSON.stringify(keyPath)} from ${JSON.stringify(value)}`); if (Array.isArray(keyPath)) { const result: IDBValidKey[] = []; diff --git a/packages/idb-bridge/src/util/key-storage.test.ts b/packages/idb-bridge/src/util/key-storage.test.ts new file mode 100644 index 000000000..dc1e1827c --- /dev/null +++ b/packages/idb-bridge/src/util/key-storage.test.ts @@ -0,0 +1,39 @@ +/* + This file is part of GNU Taler + (C) 2023 Taler Systems S.A. + + GNU Taler is free software; you can redistribute it and/or modify it under the + terms of the GNU General Public License as published by the Free Software + Foundation; either version 3, or (at your option) any later version. + + GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY + WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR + A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with + GNU Taler; see the file COPYING. If not, see + */ + +import test, { ExecutionContext } from "ava"; +import { deserializeKey, serializeKey } from "./key-storage.js"; +import { IDBValidKey } from "../idbtypes.js"; + +function checkKeySer(t: ExecutionContext, k: IDBValidKey): void { + const keyEnc = serializeKey(k); + const keyDec = deserializeKey(keyEnc); + t.deepEqual(k, keyDec); +} + +test("basics", (t) => { + checkKeySer(t, "foo"); + checkKeySer(t, "foo\0bar"); + checkKeySer(t, "foo\u1000bar"); + checkKeySer(t, "foo\u2000bar"); + checkKeySer(t, "foo\u5000bar"); + checkKeySer(t, "foo\uffffbar"); + checkKeySer(t, 42); + checkKeySer(t, 255); + checkKeySer(t, 254); + checkKeySer(t, [1, 2, 3, 4]); + checkKeySer(t, [[[1], 3], [4]]); +}); diff --git a/packages/idb-bridge/src/util/key-storage.ts b/packages/idb-bridge/src/util/key-storage.ts new file mode 100644 index 000000000..b71548dd3 --- /dev/null +++ b/packages/idb-bridge/src/util/key-storage.ts @@ -0,0 +1,363 @@ +/* + This file is part of GNU Taler + (C) 2023 Taler Systems S.A. + + GNU Taler is free software; you can redistribute it and/or modify it under the + terms of the GNU General Public License as published by the Free Software + Foundation; either version 3, or (at your option) any later version. + + GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY + WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR + A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with + GNU Taler; see the file COPYING. If not, see + */ + +/* +Encoding rules (inspired by Firefox, but slightly simplified): + +Numbers: 0x10 n n n n n n n n +Dates: 0x20 n n n n n n n n +Strings: 0x30 s s s s ... 0 +Binaries: 0x40 s s s s ... 0 +Arrays: 0x50 i i i ... 0 + +Numbers/dates are encoded as 64-bit IEEE 754 floats with the sign bit +flipped, in order to make them sortable. +*/ + +/** + * Imports. 
+ */ +import { IDBValidKey } from "../idbtypes.js"; + +const tagNum = 0xa0; +const tagDate = 0xb0; +const tagString = 0xc0; +const tagBinary = 0xd0; +const tagArray = 0xe0; + +const oneByteOffset = 0x01; +const twoByteOffset = 0x7f; +const oneByteMax = 0x7e; +const twoByteMax = 0x3fff + twoByteOffset; +const twoByteMask = 0b1000_0000; +const threeByteMask = 0b1100_0000; + +export function countEncSize(c: number): number { + if (c > twoByteMax) { + return 3; + } + if (c > oneByteMax) { + return 2; + } + return 1; +} + +export function writeEnc(dv: DataView, offset: number, c: number): number { + if (c > twoByteMax) { + dv.setUint8(offset + 2, (c & 0xff) << 6); + dv.setUint8(offset + 1, (c >>> 2) & 0xff); + dv.setUint8(offset, threeByteMask | (c >>> 10)); + return 3; + } else if (c > oneByteMax) { + c -= twoByteOffset; + dv.setUint8(offset + 1, c & 0xff); + dv.setUint8(offset, (c >>> 8) | twoByteMask); + return 2; + } else { + c += oneByteOffset; + dv.setUint8(offset, c); + return 1; + } +} + +export function internalSerializeString( + dv: DataView, + offset: number, + key: string, +): number { + dv.setUint8(offset, tagString); + let n = 1; + for (let i = 0; i < key.length; i++) { + let c = key.charCodeAt(i); + n += writeEnc(dv, offset + n, c); + } + // Null terminator + dv.setUint8(offset + n, 0); + n++; + return n; +} + +export function countSerializeKey(key: IDBValidKey): number { + if (typeof key === "number") { + return 9; + } + if (key instanceof Date) { + return 9; + } + if (key instanceof ArrayBuffer) { + let len = 2; + const uv = new Uint8Array(key); + for (let i = 0; i < uv.length; i++) { + len += countEncSize(uv[i]); + } + return len; + } + if (ArrayBuffer.isView(key)) { + let len = 2; + const uv = new Uint8Array(key.buffer, key.byteOffset, key.byteLength); + for (let i = 0; i < uv.length; i++) { + len += countEncSize(uv[i]); + } + return len; + } + if (typeof key === "string") { + let len = 2; + for (let i = 0; i < key.length; i++) { + len += countEncSize(key.charCodeAt(i)); + } + return len; + } + if (Array.isArray(key)) { + let len = 2; + for (let i = 0; i < key.length; i++) { + len += countSerializeKey(key[i]); + } + return len; + } + throw Error("unsupported type for key"); +} + +function internalSerializeNumeric( + dv: DataView, + offset: number, + tag: number, + val: number, +): number { + dv.setUint8(offset, tag); + dv.setFloat64(offset + 1, val); + // Flip sign bit + let b = dv.getUint8(offset + 1); + b ^= 0x80; + dv.setUint8(offset + 1, b); + return 9; +} + +function internalSerializeArray( + dv: DataView, + offset: number, + key: any[], +): number { + dv.setUint8(offset, tagArray); + let n = 1; + for (let i = 0; i < key.length; i++) { + n += internalSerializeKey(key[i], dv, offset + n); + } + dv.setUint8(offset + n, 0); + n++; + return n; +} + +function internalSerializeBinary( + dv: DataView, + offset: number, + key: Uint8Array, +): number { + dv.setUint8(offset, tagBinary); + let n = 1; + for (let i = 0; i < key.length; i++) { + n += writeEnc(dv, offset + n, key[i]); + } + dv.setUint8(offset + n, 0); + n++; + return n; +} + +function internalSerializeKey( + key: IDBValidKey, + dv: DataView, + offset: number, +): number { + if (typeof key === "number") { + return internalSerializeNumeric(dv, offset, tagNum, key); + } + if (key instanceof Date) { + return internalSerializeNumeric(dv, offset, tagDate, key.getTime()); + } + if (typeof key === "string") { + return internalSerializeString(dv, offset, key); + } + if (Array.isArray(key)) { + return
internalSerializeArray(dv, offset, key); + } + if (key instanceof ArrayBuffer) { + return internalSerializeBinary(dv, offset, new Uint8Array(key)); + } + if (ArrayBuffer.isView(key)) { + const uv = new Uint8Array(key.buffer, key.byteOffset, key.byteLength); + return internalSerializeBinary(dv, offset, uv); + } + throw Error("unsupported type for key"); +} + +export function serializeKey(key: IDBValidKey): Uint8Array { + const len = countSerializeKey(key); + let buf = new Uint8Array(len); + const outLen = internalSerializeKey(key, new DataView(buf.buffer), 0); + if (len != outLen) { + throw Error("internal invariant failed"); + } + let numTrailingZeroes = 0; + for (let i = buf.length - 1; i >= 0 && buf[i] == 0; i--, numTrailingZeroes++); + if (numTrailingZeroes > 0) { + buf = buf.slice(0, buf.length - numTrailingZeroes); + } + return buf; +} + +function internalReadString(dv: DataView, offset: number): [number, string] { + const chars: string[] = []; + while (offset < dv.byteLength) { + const v = dv.getUint8(offset); + if (v == 0) { + // Got end-of-string. + offset += 1; + break; + } + let c: number; + if ((v & threeByteMask) === threeByteMask) { + const b1 = v; + const b2 = dv.getUint8(offset + 1); + const b3 = dv.getUint8(offset + 2); + c = (b1 << 10) | (b2 << 2) | (b3 >> 6); + offset += 3; + } else if ((v & twoByteMask) === twoByteMask) { + const b1 = v & ~twoByteMask; + const b2 = dv.getUint8(offset + 1); + c = ((b1 << 8) | b2) + twoByteOffset; + offset += 2; + } else { + c = v - oneByteOffset; + offset += 1; + } + chars.push(String.fromCharCode(c)); + } + return [offset, chars.join("")]; +} + +function internalReadBytes(dv: DataView, offset: number): [number, Uint8Array] { + let count = 0; + while (offset + count < dv.byteLength) { + const v = dv.getUint8(offset + count); + if (v === 0) { + break; + } + count++; + } + let writePos = 0; + const bytes = new Uint8Array(count); + while (offset < dv.byteLength) { + const v = dv.getUint8(offset); + if (v == 0) { + offset += 1; + break; + } + let c: number; + if ((v & threeByteMask) === threeByteMask) { + const b1 = v; + const b2 = dv.getUint8(offset + 1); + const b3 = dv.getUint8(offset + 2); + c = (b1 << 10) | (b2 << 2) | (b3 >> 6); + offset += 3; + } else if ((v & twoByteMask) === twoByteMask) { + const b1 = v & ~twoByteMask; + const b2 = dv.getUint8(offset + 1); + c = ((b1 << 8) | b2) + twoByteOffset; + offset += 2; + } else { + c = v - oneByteOffset; + offset += 1; + } + bytes[writePos] = c; + writePos++; + } + return [offset, bytes]; +} + +/** + * Same as DataView.getFloat64, but logically pad input + * with zeroes on the right if read offset would be out + * of bounds. + * + * This allows reading from buffers where zeros have been + * truncated. 
+ */ +function getFloat64Trunc(dv: DataView, offset: number): number { + if (offset + 7 >= dv.byteLength) { + const buf = new Uint8Array(8); + for (let i = offset; i < dv.byteLength; i++) { + buf[i - offset] = dv.getUint8(i); + } + const dv2 = new DataView(buf.buffer); + return dv2.getFloat64(0); + } else { + return dv.getFloat64(offset); + } +} + +function internalDeserializeKey( + dv: DataView, + offset: number, +): [number, IDBValidKey] { + let tag = dv.getUint8(offset); + switch (tag) { + case tagNum: { + const num = -getFloat64Trunc(dv, offset + 1); + const newOffset = Math.min(offset + 9, dv.byteLength); + return [newOffset, num]; + } + case tagDate: { + const num = -getFloat64Trunc(dv, offset + 1); + const newOffset = Math.min(offset + 9, dv.byteLength); + return [newOffset, new Date(num)]; + } + case tagString: { + return internalReadString(dv, offset + 1); + } + case tagBinary: { + return internalReadBytes(dv, offset + 1); + } + case tagArray: { + const arr: any[] = []; + offset += 1; + while (offset < dv.byteLength) { + const innerTag = dv.getUint8(offset); + if (innerTag === 0) { + offset++; + break; + } + const [innerOff, innerVal] = internalDeserializeKey(dv, offset); + arr.push(innerVal); + offset = innerOff; + } + return [offset, arr]; + } + default: + throw Error("invalid key (unrecognized tag)"); + } +} + +export function deserializeKey(encodedKey: Uint8Array): IDBValidKey { + const dv = new DataView( + encodedKey.buffer, + encodedKey.byteOffset, + encodedKey.byteLength, + ); + let [off, res] = internalDeserializeKey(dv, 0); + if (off != encodedKey.byteLength) { + throw Error("internal invariant failed"); + } + return res; +} diff --git a/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts b/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts index 971697021..c1216fe97 100644 --- a/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts +++ b/packages/idb-bridge/src/util/makeStoreKeyValue.test.ts @@ -20,55 +20,73 @@ import { makeStoreKeyValue } from "./makeStoreKeyValue.js"; test("basics", (t) => { let result; - result = makeStoreKeyValue({ name: "Florian" }, undefined, 42, true, "id"); + result = makeStoreKeyValue({ + value: { name: "Florian" }, + key: undefined, + currentKeyGenerator: 42, + autoIncrement: true, + keyPath: "id", + }); t.is(result.updatedKeyGenerator, 43); t.is(result.key, 42); t.is(result.value.name, "Florian"); t.is(result.value.id, 42); - result = makeStoreKeyValue( - { name: "Florian", id: 10 }, - undefined, - 5, - true, - "id", - ); + result = makeStoreKeyValue({ + value: { name: "Florian", id: 10 }, + key: undefined, + currentKeyGenerator: 5, + autoIncrement: true, + keyPath: "id", + }); t.is(result.updatedKeyGenerator, 11); t.is(result.key, 10); t.is(result.value.name, "Florian"); t.is(result.value.id, 10); - result = makeStoreKeyValue( - { name: "Florian", id: 5 }, - undefined, - 10, - true, - "id", - ); + result = makeStoreKeyValue({ + value: { name: "Florian", id: 5 }, + key: undefined, + currentKeyGenerator: 10, + autoIncrement: true, + keyPath: "id", + }); t.is(result.updatedKeyGenerator, 10); t.is(result.key, 5); t.is(result.value.name, "Florian"); t.is(result.value.id, 5); - result = makeStoreKeyValue( - { name: "Florian", id: "foo" }, - undefined, - 10, - true, - "id", - ); + result = makeStoreKeyValue({ + value: { name: "Florian", id: "foo" }, + key: undefined, + currentKeyGenerator: 10, + autoIncrement: true, + keyPath: "id", + }); t.is(result.updatedKeyGenerator, 10); t.is(result.key, "foo"); t.is(result.value.name, "Florian"); 
t.is(result.value.id, "foo"); - result = makeStoreKeyValue({ name: "Florian" }, "foo", 10, true, null); + result = makeStoreKeyValue({ + value: { name: "Florian" }, + key: "foo", + currentKeyGenerator: 10, + autoIncrement: true, + keyPath: null, + }); t.is(result.updatedKeyGenerator, 10); t.is(result.key, "foo"); t.is(result.value.name, "Florian"); t.is(result.value.id, undefined); - result = makeStoreKeyValue({ name: "Florian" }, undefined, 10, true, null); + result = makeStoreKeyValue({ + value: { name: "Florian" }, + key: undefined, + currentKeyGenerator: 10, + autoIncrement: true, + keyPath: null, + }); t.is(result.updatedKeyGenerator, 11); t.is(result.key, 10); t.is(result.value.name, "Florian"); diff --git a/packages/idb-bridge/src/util/makeStoreKeyValue.ts b/packages/idb-bridge/src/util/makeStoreKeyValue.ts index 4c7dab8d2..153cd9d81 100644 --- a/packages/idb-bridge/src/util/makeStoreKeyValue.ts +++ b/packages/idb-bridge/src/util/makeStoreKeyValue.ts @@ -75,19 +75,25 @@ function injectKey( return newValue; } -export function makeStoreKeyValue( - value: any, - key: IDBValidKey | undefined, - currentKeyGenerator: number, - autoIncrement: boolean, - keyPath: IDBKeyPath | IDBKeyPath[] | null, -): StoreKeyResult { +export interface MakeStoreKvRequest { + value: any; + key: IDBValidKey | undefined; + currentKeyGenerator: number; + autoIncrement: boolean; + keyPath: IDBKeyPath | IDBKeyPath[] | null; +} + +export function makeStoreKeyValue(req: MakeStoreKvRequest): StoreKeyResult { + const { keyPath, currentKeyGenerator, autoIncrement } = req; + let { key, value } = req; + const haveKey = key !== null && key !== undefined; const haveKeyPath = keyPath !== null && keyPath !== undefined; // This models a decision table on (haveKey, haveKeyPath, autoIncrement) try { + // FIXME: Perf: only do this if we need to inject something. value = structuredClone(value); } catch (e) { throw new DataCloneError(); diff --git a/packages/idb-bridge/src/util/queueTask.ts b/packages/idb-bridge/src/util/queueTask.ts index 297602c67..f8a6e799f 100644 --- a/packages/idb-bridge/src/util/queueTask.ts +++ b/packages/idb-bridge/src/util/queueTask.ts @@ -14,6 +14,11 @@ permissions and limitations under the License. */ +/** + * Queue a task to be executed *after* the microtask + * queue has been processed, but *before* subsequent setTimeout / setImmediate + * tasks. 
+ */ export function queueTask(fn: () => void) { let called = false; const callFirst = () => { diff --git a/packages/idb-bridge/src/util/structuredClone.test.ts b/packages/idb-bridge/src/util/structuredClone.test.ts index 0c613e6cc..e13d4117f 100644 --- a/packages/idb-bridge/src/util/structuredClone.test.ts +++ b/packages/idb-bridge/src/util/structuredClone.test.ts @@ -15,7 +15,11 @@ */ import test, { ExecutionContext } from "ava"; -import { structuredClone } from "./structuredClone.js"; +import { + structuredClone, + structuredEncapsulate, + structuredRevive, +} from "./structuredClone.js"; function checkClone(t: ExecutionContext, x: any): void { t.deepEqual(structuredClone(x), x); @@ -59,3 +63,58 @@ test("structured clone (object cycles)", (t) => { const obj1Clone = structuredClone(obj1); t.is(obj1Clone, obj1Clone.c); }); + +test("encapsulate", (t) => { + t.deepEqual(structuredEncapsulate(42), 42); + t.deepEqual(structuredEncapsulate(true), true); + t.deepEqual(structuredEncapsulate(false), false); + t.deepEqual(structuredEncapsulate(null), null); + + t.deepEqual(structuredEncapsulate(undefined), { $: "undef" }); + t.deepEqual(structuredEncapsulate(42n), { $: "bigint", val: "42" }); + + t.deepEqual(structuredEncapsulate(new Date(42)), { $: "date", val: 42 }); + + t.deepEqual(structuredEncapsulate({ x: 42 }), { x: 42 }); + + t.deepEqual(structuredEncapsulate({ $: "bla", x: 42 }), { + $: "obj", + val: { $: "bla", x: 42 }, + }); + + const x = { foo: 42, bar: {} } as any; + x.bar.baz = x; + + t.deepEqual(structuredEncapsulate(x), { + foo: 42, + bar: { + baz: { $: "ref", d: 2, p: [] }, + }, + }); +}); + +test("revive", (t) => { + t.deepEqual(structuredRevive(42), 42); + t.deepEqual(structuredRevive([1, 2, 3]), [1, 2, 3]); + t.deepEqual(structuredRevive(true), true); + t.deepEqual(structuredRevive(false), false); + t.deepEqual(structuredRevive(null), null); + t.deepEqual(structuredRevive({ $: "undef" }), undefined); + t.deepEqual(structuredRevive({ x: { $: "undef" } }), { x: undefined }); + + t.deepEqual(structuredRevive({ $: "date", val: 42}), new Date(42)); + + { + const x = { foo: 42, bar: {} } as any; + x.bar.baz = x; + + const r = { + foo: 42, + bar: { + baz: { $: "ref", d: 2, p: [] }, + }, + }; + + t.deepEqual(structuredRevive(r), x); + } +}); diff --git a/packages/idb-bridge/src/util/structuredClone.ts b/packages/idb-bridge/src/util/structuredClone.ts index 2170118d5..2f857c6c5 100644 --- a/packages/idb-bridge/src/util/structuredClone.ts +++ b/packages/idb-bridge/src/util/structuredClone.ts @@ -16,22 +16,21 @@ /** * Encoding (new, compositional version): - * + * * Encapsulate object that itself might contain a "$" field: - * { $: { E... } } + * { $: "obj", val: ... } + * (Outer level only:) Wrap other values into object + * { $: "lit", val: ... } * Circular reference: - * { $: ["ref", uplevel, field...] } + * { $: "ref" l: uplevel, p: path } * Date: - * { $: ["data"], val: datestr } + * { $: "date", val: datestr } * Bigint: - * { $: ["bigint"], val: bigintstr } + * { $: "bigint", val: bigintstr } * Array with special (non-number) attributes: - * { $: ["array"], val: arrayobj } + * { $: "array", val: arrayobj } * Undefined field * { $: "undef" } - * - * Legacy (top-level only), for backwards compatibility: - * { $types: [...] 
} */ /** @@ -261,22 +260,18 @@ export function mkDeepCloneCheckOnly() { function internalEncapsulate( val: any, - outRoot: any, path: string[], memo: Map, - types: Array<[string[], string]>, ): any { const memoPath = memo.get(val); if (memoPath) { - types.push([path, "ref"]); - return memoPath; + return { $: "ref", d: path.length, p: memoPath }; } if (val === null) { return null; } if (val === undefined) { - types.push([path, "undef"]); - return 0; + return { $: "undef" }; } if (Array.isArray(val)) { memo.set(val, path); @@ -289,31 +284,33 @@ function internalEncapsulate( break; } } - if (special) { - types.push([path, "array"]); - } for (const x in val) { const p = [...path, x]; - outArr[x] = internalEncapsulate(val[x], outRoot, p, memo, types); + outArr[x] = internalEncapsulate(val[x], p, memo); + } + if (special) { + return { $: "array", val: outArr }; + } else { + return outArr; } - return outArr; } if (val instanceof Date) { - types.push([path, "date"]); - return val.getTime(); + return { $: "date", val: val.getTime() }; } if (isUserObject(val) || isPlainObject(val)) { memo.set(val, path); const outObj: any = {}; for (const x in val) { const p = [...path, x]; - outObj[x] = internalEncapsulate(val[x], outRoot, p, memo, types); + outObj[x] = internalEncapsulate(val[x], p, memo); + } + if ("$" in outObj) { + return { $: "obj", val: outObj }; } return outObj; } if (typeof val === "bigint") { - types.push([path, "bigint"]); - return val.toString(); + return { $: "bigint", val: val.toString() }; } if (typeof val === "boolean") { return val; @@ -327,123 +324,103 @@ function internalEncapsulate( throw Error(); } -/** - * Encapsulate a cloneable value into a plain JSON object. - */ -export function structuredEncapsulate(val: any): any { - const outRoot = {}; - const types: Array<[string[], string]> = []; - let res; - res = internalEncapsulate(val, outRoot, [], new Map(), types); - if (res === null) { - return res; - } - // We need to further encapsulate the outer layer - if ( - Array.isArray(res) || - typeof res !== "object" || - "$" in res || - "$types" in res - ) { - res = { $: res }; - } - if (types.length > 0) { - res["$types"] = types; - } - return res; +function derefPath( + root: any, + p1: Array, + n: number, + p2: Array, +): any { + let v = root; + for (let i = 0; i < n; i++) { + v = v[p1[i]]; + } + for (let i = 0; i < p2.length; i++) { + v = v[p2[i]]; + } + return v; } -export function applyLegacyTypeAnnotations(val: any): any { - if (val === null) { - return null; +function internalReviveArray(sval: any, root: any, path: string[]): any { + const newArr: any[] = []; + if (root === undefined) { + root = newArr; } - if (typeof val === "number") { - return val; + for (let i = 0; i < sval.length; i++) { + const p = [...path, String(i)]; + newArr.push(internalStructuredRevive(sval[i], root, p)); } - if (typeof val === "string") { - return val; + return newArr; +} + +function internalReviveObject(sval: any, root: any, path: string[]): any { + const newObj = {} as any; + if (root === undefined) { + root = newObj; } - if (typeof val === "boolean") { - return val; + for (const key of Object.keys(sval)) { + const p = [...path, key]; + newObj[key] = internalStructuredRevive(sval[key], root, p); } - if (!isPlainObject(val)) { - throw Error(); - } - let types = val.$types ?? 
[]; - delete val.$types; - let outRoot: any; - if ("$" in val) { - outRoot = val.$; - } else { - outRoot = val; - } - function mutatePath(path: string[], f: (x: any) => any): void { - if (path.length == 0) { - outRoot = f(outRoot); - return; - } - let obj = outRoot; - for (let i = 0; i < path.length - 1; i++) { - const n = path[i]; - if (!(n in obj)) { - obj[n] = {}; - } - obj = obj[n]; - } - const last = path[path.length - 1]; - obj[last] = f(obj[last]); + return newObj; +} + +function internalStructuredRevive(sval: any, root: any, path: string[]): any { + if (typeof sval === "string") { + return sval; } - function lookupPath(path: string[]): any { - let obj = outRoot; - for (const n of path) { - obj = obj[n]; - } - return obj; + if (typeof sval === "number") { + return sval; } - for (const [path, type] of types) { - switch (type) { - case "bigint": { - mutatePath(path, (x) => BigInt(x)); - break; - } - case "array": { - mutatePath(path, (x) => { - const newArr: any = []; - for (const k in x) { - newArr[k] = x[k]; - } - return newArr; - }); - break; - } - case "date": { - mutatePath(path, (x) => new Date(x)); - break; - } - case "undef": { - mutatePath(path, (x) => undefined); - break; - } - case "ref": { - mutatePath(path, (x) => lookupPath(x)); - break; + if (typeof sval === "boolean") { + return sval; + } + if (sval === null) { + return null; + } + if (Array.isArray(sval)) { + return internalReviveArray(sval, root, path); + } + + if (isUserObject(sval) || isPlainObject(sval)) { + if ("$" in sval) { + const dollar = sval.$; + switch (dollar) { + case "undef": + return undefined; + case "bigint": + return BigInt((sval as any).val); + case "date": + return new Date((sval as any).val); + case "obj": { + return internalReviveObject((sval as any).val, root, path); + } + case "array": + return internalReviveArray((sval as any).val, root, path); + case "ref": { + const level = (sval as any).l; + const p2 = (sval as any).p; + return derefPath(root, path, path.length - level, p2); + } + default: + throw Error(); } - default: - throw Error(`type '${type}' not implemented`); + } else { + return internalReviveObject(sval, root, path); } } - return outRoot; + + throw Error(); } -export function internalStructuredRevive(val: any): any { - // FIXME: Do the newly specified, compositional encoding here. - val = JSON.parse(JSON.stringify(val)); - return val; +/** + * Encapsulate a cloneable value into a plain JSON value. + */ +export function structuredEncapsulate(val: any): any { + return internalEncapsulate(val, [], new Map()); } -export function structuredRevive(val: any): any { - const r = internalStructuredRevive(val); - return applyLegacyTypeAnnotations(r); +export function structuredRevive(sval: any): any { + return internalStructuredRevive(sval, undefined, []); } /** diff --git a/packages/idb-bridge/src/util/valueToKey.ts b/packages/idb-bridge/src/util/valueToKey.ts index 6df82af81..0cd824689 100644 --- a/packages/idb-bridge/src/util/valueToKey.ts +++ b/packages/idb-bridge/src/util/valueToKey.ts @@ -17,7 +17,11 @@ import { IDBValidKey } from "../idbtypes.js"; import { DataError } from "./errors.js"; -// https://www.w3.org/TR/IndexedDB-2/#convert-a-value-to-a-key +/** + * Algorithm to "convert a value to a key". 
+ * + * https://www.w3.org/TR/IndexedDB/#convert-value-to-key + */ export function valueToKey( input: any, seen?: Set, diff --git a/packages/idb-bridge/tsconfig.json b/packages/idb-bridge/tsconfig.json index b0a6808f4..19e9c2a74 100644 --- a/packages/idb-bridge/tsconfig.json +++ b/packages/idb-bridge/tsconfig.json @@ -4,7 +4,7 @@ "lib": ["es6"], "module": "ES2020", "moduleResolution": "Node16", - "target": "ES6", + "target": "ES2020", "allowJs": true, "noImplicitAny": true, "outDir": "lib", -- cgit v1.2.3
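The new idbpromutil.ts exposes promise wrappers for IndexedDB requests and bridge transactions. A minimal usage sketch, not part of the patch; relative imports assume a file inside packages/idb-bridge/src/, and the "notes" object store is illustrative:

```ts
import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js";
import { BridgeIDBDatabase } from "./bridge-idb.js";

// Read one record and wait for the surrounding transaction to settle.
// Assumes a "notes" object store already exists in the database.
async function readNote(db: BridgeIDBDatabase, id: string): Promise<unknown> {
  const tx = db.transaction("notes", "readonly");
  const value = await promiseFromRequest(tx.objectStore("notes").get(id));
  await promiseFromTransaction(tx);
  return value;
}
```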
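The central addition of this commit is the sqlite3 backend. The wiring below follows the same pattern as the new testingdb.ts: load the optional better-sqlite3 binding, create the backend over it, and hand the backend to BridgeIDBFactory. A sketch under those assumptions; the "wallet.sqlite3" filename is illustrative:

```ts
import { createSqliteBackend } from "./SqliteBackend.js";
import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
import { BridgeIDBFactory } from "./bridge-idb.js";

// Create an IndexedDB factory backed by an sqlite3 database file.
// Pass ":memory:" instead of a filename for a purely in-memory database.
async function openSqliteIndexedDb(filename: string): Promise<BridgeIDBFactory> {
  const sqlite3Impl = await createNodeSqlite3Impl(); // dynamically imports better-sqlite3
  const backend = await createSqliteBackend(sqlite3Impl, { filename });
  return new BridgeIDBFactory(backend);
}

// Example:
// const indexedDB = await openSqliteIndexedDb("wallet.sqlite3");
// const openReq = indexedDB.open("mydb", 1);
```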
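The ported WPT tests now obtain their IndexedDB factory from testingdb.ts via test.before. A small ava test against that harness could look roughly like the following; the database and store names are illustrative:

```ts
import test from "ava";
import { initTestIndexedDB, useTestIndexedDb } from "./testingdb.js";

// All tests in the file share one sqlite3-backed (in-memory) IndexedDB factory.
test.before("test DB initialization", initTestIndexedDB);

test("create a store and add a record", async (t) => {
  const indexedDB = useTestIndexedDb();
  await new Promise<void>((resolve, reject) => {
    const req = indexedDB.open("example-db-" + Date.now(), 1);
    req.onupgradeneeded = () => {
      req.result.createObjectStore("notes", { keyPath: "id" });
    };
    req.onsuccess = () => {
      const db = req.result;
      const tx = db.transaction("notes", "readwrite");
      tx.objectStore("notes").add({ id: "n1", text: "hello" });
      tx.oncomplete = () => {
        t.pass();
        db.close();
        resolve();
      };
      tx.onerror = () => reject(new Error("transaction failed"));
    };
    req.onerror = () => reject(req.error);
  });
});
```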
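key-storage.ts serializes IndexedDB keys to Uint8Arrays, presumably so the sqlite3 backend can store and order them as blobs. A round-trip sketch modeled on the new key-storage.test.ts (imports are relative to the package's src/ directory):

```ts
import { deserializeKey, serializeKey } from "./util/key-storage.js";

// Round-trip a compound IndexedDB key through the binary encoding.
const key = ["user:1", 42, [7, 8]];
const encoded = serializeKey(key);   // Uint8Array, trailing zero bytes trimmed
const decoded = deserializeKey(encoded);
console.log(decoded);                // [ "user:1", 42, [ 7, 8 ] ]
```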
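makeStoreKeyValue now takes a single request object instead of five positional arguments. A sketch of the new call shape, mirroring the updated tests; the record fields are illustrative:

```ts
import { makeStoreKeyValue } from "./util/makeStoreKeyValue.js";

// autoIncrement store with keyPath "id": the generated key is injected
// into the stored value and the key generator advances.
const result = makeStoreKeyValue({
  value: { text: "hello" },
  key: undefined,
  currentKeyGenerator: 1,
  autoIncrement: true,
  keyPath: "id",
});
console.log(result.key);                 // 1
console.log(result.value.id);            // 1
console.log(result.updatedKeyGenerator); // 2
```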
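A small sketch of the scheduling behavior described by the new queueTask doc comment; the expected output order is inferred from that comment rather than verified here:

```ts
import { queueTask } from "./util/queueTask.js";

Promise.resolve().then(() => console.log("microtask"));
queueTask(() => console.log("queued task"));
setTimeout(() => console.log("timeout"), 0);
// Expected order per the queueTask comment: microtask, queued task, timeout.
```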
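The rewritten structured-clone encoding is compositional: values that JSON cannot represent become small tagged objects, while plain data passes through unchanged. The calls below mirror cases from the new structuredClone.test.ts:

```ts
import {
  structuredEncapsulate,
  structuredRevive,
} from "./util/structuredClone.js";

// Special values become tagged objects; plain JSON-compatible data passes through.
console.log(structuredEncapsulate(new Date(42))); // { $: "date", val: 42 }
console.log(structuredEncapsulate(42n));          // { $: "bigint", val: "42" }
console.log(structuredEncapsulate(undefined));    // { $: "undef" }
console.log(structuredEncapsulate({ x: 42 }));    // { x: 42 }

// Reviving restores the original values.
const d = structuredRevive({ $: "date", val: 42 });
console.log(d instanceof Date, d.getTime());      // true 42
```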
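valueToKey implements the spec's "convert a value to a key" algorithm. A brief sketch of the expected behavior; the invalid-key case throwing DataError follows the spec and is an assumption here, since this patch only touches the function's documentation:

```ts
import { valueToKey } from "./util/valueToKey.js";
import { DataError } from "./util/errors.js";

console.log(valueToKey(42));        // 42
console.log(valueToKey(["a", 1]));  // [ "a", 1 ]

try {
  valueToKey({ not: "a key" });     // plain objects are not valid keys
} catch (e) {
  console.log(e instanceof DataError); // true
}
```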