sqlite3 backend for idb-bridge / wallet-core

This commit is contained in:
Florian Dold 2023-07-11 15:41:48 +02:00
parent 58fdf9dc09
commit b2d0ad57dd
No known key found for this signature in database
GPG Key ID: D2E4F00F29D02A4B
75 changed files with 5752 additions and 1496 deletions

View File

@ -18,22 +18,26 @@
"exports": {
".": {
"default": "./lib/index.js"
},
"./node-sqlite3-bindings": {
"default": "./lib/node-sqlite3-impl.js"
}
},
"devDependencies": {
"@types/node": "^18.11.17",
"ava": "^4.3.3",
"esm": "^3.2.25",
"@types/better-sqlite3": "^7.6.4",
"@types/node": "^20.4.1",
"ava": "^5.3.1",
"prettier": "^2.8.8",
"rimraf": "^3.0.2",
"typescript": "^5.1.3"
"rimraf": "^5.0.1",
"typescript": "^5.1.6"
},
"dependencies": {
"tslib": "^2.5.3"
"tslib": "^2.6.0"
},
"ava": {
"require": [
"esm"
]
"failFast": true
},
"optionalDependencies": {
"better-sqlite3": "^8.4.0"
}
}

View File

@ -15,334 +15,9 @@
*/
import test from "ava";
import {
BridgeIDBCursorWithValue,
BridgeIDBDatabase,
BridgeIDBFactory,
BridgeIDBKeyRange,
BridgeIDBRequest,
BridgeIDBTransaction,
} from "./bridge-idb.js";
import {
IDBCursorDirection,
IDBCursorWithValue,
IDBDatabase,
IDBKeyRange,
IDBValidKey,
} from "./idbtypes.js";
import { MemoryBackend } from "./MemoryBackend.js";
/**
 * Adapt a BridgeIDBRequest to a promise: resolves with the request's
 * result on success, rejects with the request's error on failure.
 */
function promiseFromRequest(request: BridgeIDBRequest): Promise<any> {
  return new Promise((resolve, reject) => {
    request.onerror = () => reject(request.error);
    request.onsuccess = () => resolve(request.result);
  });
}
/**
 * Adapt a BridgeIDBTransaction to a promise: resolves when the
 * transaction completes, rejects when it errors.
 */
function promiseFromTransaction(
  transaction: BridgeIDBTransaction,
): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    transaction.oncomplete = () => {
      resolve();
    };
    transaction.onerror = () => {
      // Propagate the transaction's error object so failures carry a
      // reason instead of rejecting with undefined.
      reject(transaction.error ?? new Error("transaction failed"));
    };
  });
}
// "Library" example from the IndexedDB spec, part 1: create the
// database schema (books store + two indexes) and seed initial
// records inside the upgrade transaction.
test("Spec: Example 1 Part 1", async (t) => {
  const backend = new MemoryBackend();
  const idb = new BridgeIDBFactory(backend);

  const request = idb.open("library");
  // Runs inside the implicit "versionchange" transaction of open().
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
    // Populate with initial data.
    store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
    store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
    store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  };

  // The open request resolves only after the upgrade finished.
  await promiseFromRequest(request);
  t.pass();
});
// "Library" example, part 2: create the schema during upgrade, then
// write records through an explicit readwrite transaction and wait
// for its completion.
test("Spec: Example 1 Part 2", async (t) => {
  const backend = new MemoryBackend();
  const idb = new BridgeIDBFactory(backend);

  const request = idb.open("library");
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };

  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, "library");

  const tx = db.transaction("books", "readwrite");
  tx.oncomplete = () => {
    console.log("oncomplete called");
  };

  const store = tx.objectStore("books");
  // Records are keyed by the store's keyPath ("isbn").
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });

  await promiseFromTransaction(tx);
  t.pass();
});
// "Library" example, part 3: exercise index lookups and cursor
// iteration in all supported directions (next, nextunique,
// prevunique) over store and index cursors.
test("Spec: Example 1 Part 3", async (t) => {
  const backend = new MemoryBackend();
  backend.enableTracing = true;
  const idb = new BridgeIDBFactory(backend);

  const request = idb.open("library");
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };

  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, "library");

  // Seed three records; note insertion order differs from key order,
  // cursors below must still iterate in key order.
  const tx = db.transaction("books", "readwrite");
  const store = tx.objectStore("books");
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  await promiseFromTransaction(tx);

  // Point lookup through the unique by_title index.
  const tx2 = db.transaction("books", "readonly");
  const store2 = tx2.objectStore("books");
  var index2 = store2.index("by_title");
  const request2 = index2.get("Bedrock Nights");
  const result2: any = await promiseFromRequest(request2);
  t.is(result2.author, "Barney");

  // Cursor over the non-unique by_author index, restricted to "Fred";
  // expect both Fred books in ascending primary-key order.
  const tx3 = db.transaction(["books"], "readonly");
  const store3 = tx3.objectStore("books");
  const index3 = store3.index("by_author");
  const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
  await promiseFromRequest(request3);
  let cursor: BridgeIDBCursorWithValue | null;
  cursor = request3.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request3);
  cursor = request3.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 234567);
  await promiseFromTransaction(tx3);

  // Plain store cursor: all records in ascending isbn order, then a
  // null cursor result signals exhaustion.
  const tx4 = db.transaction("books", "readonly");
  const store4 = tx4.objectStore("books");
  const request4 = store4.openCursor();

  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 123456);

  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 234567);

  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 345678);

  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  t.is(cursor, null);

  // Index cursor, direction "next": index-key order is Barney,
  // Fred, Fred.
  const tx5 = db.transaction("books", "readonly");
  const store5 = tx5.objectStore("books");
  const index5 = store5.index("by_author");
  const request5 = index5.openCursor(null, "next");

  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");

  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");

  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");

  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  t.is(cursor, null);

  // Direction "nextunique": duplicate index keys are skipped; the
  // Fred entry with the lowest primary key (123456) is reported.
  const request6 = index5.openCursor(null, "nextunique");

  await promiseFromRequest(request6);
  cursor = request6.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");

  cursor.continue();
  await promiseFromRequest(request6);
  cursor = request6.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);

  cursor.continue();
  await promiseFromRequest(request6);
  cursor = request6.result;
  t.is(cursor, null);

  // Direction "prevunique": same unique entries, reverse index-key
  // order, still reporting the lowest primary key per index key.
  const request7 = index5.openCursor(null, "prevunique");

  await promiseFromRequest(request7);
  cursor = request7.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);

  cursor.continue();
  await promiseFromRequest(request7);
  cursor = request7.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");

  cursor.continue();
  await promiseFromRequest(request7);
  cursor = request7.result;
  t.is(cursor, null);

  db.close();
  t.pass();
});
// Deleting a record by primary key removes it (get returns
// undefined) while leaving other records intact.
test("simple deletion", async (t) => {
  const backend = new MemoryBackend();
  const idb = new BridgeIDBFactory(backend);

  const request = idb.open("library");
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };

  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, "library");

  const tx = db.transaction("books", "readwrite");
  tx.oncomplete = () => {
    console.log("oncomplete called");
  };

  const store = tx.objectStore("books");
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });

  await promiseFromTransaction(tx);

  const tx2 = db.transaction("books", "readwrite");
  const store2 = tx2.objectStore("books");

  // Sanity check: record exists before deletion.
  const req1 = store2.get(234567);
  await promiseFromRequest(req1);
  t.is(req1.readyState, "done");
  t.is(req1.result.author, "Fred");

  store2.delete(123456);

  // Deleted key now yields undefined.
  const req2 = store2.get(123456);
  await promiseFromRequest(req2);
  t.is(req2.readyState, "done");
  t.is(req2.result, undefined);

  // Unrelated record is still present.
  const req3 = store2.get(234567);
  await promiseFromRequest(req3);
  t.is(req3.readyState, "done");
  t.is(req3.result.author, "Fred");

  await promiseFromTransaction(tx2);
  t.pass();
});
import { BridgeIDBDatabase, BridgeIDBFactory } from "./bridge-idb.js";
import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js";
test("export", async (t) => {
const backend = new MemoryBackend();
@ -386,276 +61,3 @@ test("export", async (t) => {
t.is(exportedData2.databases["library"].schema.databaseVersion, 42);
t.pass();
});
// Overwriting a record with one that lacks an indexed property must
// remove the old entry from that index.
test("update with non-existent index values", async (t) => {
  const backend = new MemoryBackend();
  backend.enableTracing = true;
  const idb = new BridgeIDBFactory(backend);
  const request = idb.open("mydb");
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("bla", { keyPath: "x" });
    store.createIndex("by_y", "y");
    store.createIndex("by_z", "z");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, "mydb");
  {
    // Insert a record with z present; it is reachable via by_z.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a", z: 42 });
    const index = store.index("by_z");
    const indRes = await promiseFromRequest(index.get(42));
    t.is(indRes.x, 0);
    const res = await promiseFromRequest(store.get(0));
    t.is(res.z, 42);
    await promiseFromTransaction(tx);
  }
  {
    // Overwrite the same primary key with a record that has no z.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a" });
    const res = await promiseFromRequest(store.get(0));
    t.is(res.z, undefined);
    await promiseFromTransaction(tx);
  }
  {
    // The stale by_z entry must be gone.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const index = store.index("by_z");
    {
      const indRes = await promiseFromRequest(index.get(42));
      t.is(indRes, undefined);
    }
    const res = await promiseFromRequest(store.get(0));
    t.is(res.z, undefined);
    await promiseFromTransaction(tx);
  }
  t.pass();
});
// Overwriting a record must not trip the unique constraint of a
// compound index when the index key stays the same: the old index
// entry has to be deleted before the new one is inserted.
test("delete from unique index", async (t) => {
  const backend = new MemoryBackend();
  backend.enableTracing = true;
  const idb = new BridgeIDBFactory(backend);
  const request = idb.open("mydb");
  request.onupgradeneeded = () => {
    const db = request.result as IDBDatabase;
    const store = db.createObjectStore("bla", { keyPath: "x" });
    // Compound, unique index over (y, z).
    store.createIndex("by_yz", ["y", "z"], {
      unique: true,
    });
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, "mydb");
  {
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a", z: 42 });
    const index = store.index("by_yz");
    const indRes = await promiseFromRequest(index.get(["a", 42]));
    t.is(indRes.x, 0);
    const res = await promiseFromRequest(store.get(0));
    t.is(res.z, 42);
    await promiseFromTransaction(tx);
  }
  {
    // Same primary key, same (y, z): must not raise ConstraintError.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a", z: 42, extra: 123 });
    await promiseFromTransaction(tx);
  }
  t.pass();
});
// Cursor iteration with key ranges and directions, over both the
// object store and the non-unique by_y index.
test("range queries", async (t) => {
  const backend = new MemoryBackend();
  backend.enableTracing = true;
  const idb = new BridgeIDBFactory(backend);
  const request = idb.open("mydb");
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("bla", { keyPath: "x" });
    store.createIndex("by_y", "y");
    store.createIndex("by_z", "z");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, "mydb");
  const tx = db.transaction("bla", "readwrite");
  const store = tx.objectStore("bla");
  // Fixture: keys 0..12, index key y taking each of "a","b","c" twice.
  store.put({ x: 0, y: "a" });
  store.put({ x: 2, y: "a" });
  store.put({ x: 4, y: "b" });
  store.put({ x: 8, y: "b" });
  store.put({ x: 10, y: "c" });
  store.put({ x: 12, y: "c" });
  await promiseFromTransaction(tx);
  // Drain a store cursor over `range`/`direction` and compare the
  // collected values against `expected`.
  async function doCursorStoreQuery(
    range: IDBKeyRange | IDBValidKey | undefined,
    direction: IDBCursorDirection | undefined,
    expected: any[],
  ): Promise<void> {
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const vals: any[] = [];
    const req = store.openCursor(range, direction);
    while (1) {
      await promiseFromRequest(req);
      const cursor: IDBCursorWithValue = req.result;
      if (!cursor) {
        break;
      }
      cursor.continue();
      vals.push(cursor.value);
    }
    await promiseFromTransaction(tx);
    t.deepEqual(vals, expected);
  }
  // Same, but iterating through the by_y index.
  async function doCursorIndexQuery(
    range: IDBKeyRange | IDBValidKey | undefined,
    direction: IDBCursorDirection | undefined,
    expected: any[],
  ): Promise<void> {
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const index = store.index("by_y");
    const vals: any[] = [];
    const req = index.openCursor(range, direction);
    while (1) {
      await promiseFromRequest(req);
      const cursor: IDBCursorWithValue = req.result;
      if (!cursor) {
        break;
      }
      cursor.continue();
      vals.push(cursor.value);
    }
    await promiseFromTransaction(tx);
    t.deepEqual(vals, expected);
  }
  // No range: every record in ascending key order.
  await doCursorStoreQuery(undefined, undefined, [
    {
      x: 0,
      y: "a",
    },
    {
      x: 2,
      y: "a",
    },
    {
      x: 4,
      y: "b",
    },
    {
      x: 8,
      y: "b",
    },
    {
      x: 10,
      y: "c",
    },
    {
      x: 12,
      y: "c",
    },
  ]);
  // Open bounds on both sides: endpoints 0 and 12 are excluded.
  await doCursorStoreQuery(
    BridgeIDBKeyRange.bound(0, 12, true, true),
    undefined,
    [
      {
        x: 2,
        y: "a",
      },
      {
        x: 4,
        y: "b",
      },
      {
        x: 8,
        y: "b",
      },
      {
        x: 10,
        y: "c",
      },
    ],
  );
  // Open index-key bounds: only y === "b" records remain.
  await doCursorIndexQuery(
    BridgeIDBKeyRange.bound("a", "c", true, true),
    undefined,
    [
      {
        x: 4,
        y: "b",
      },
      {
        x: 8,
        y: "b",
      },
    ],
  );
  // "nextunique": one record per distinct index key, lowest primary
  // key first.
  await doCursorIndexQuery(undefined, "nextunique", [
    {
      x: 0,
      y: "a",
    },
    {
      x: 4,
      y: "b",
    },
    {
      x: 10,
      y: "c",
    },
  ]);
  // "prevunique": same unique records, reverse index-key order.
  await doCursorIndexQuery(undefined, "prevunique", [
    {
      x: 10,
      y: "c",
    },
    {
      x: 4,
      y: "b",
    },
    {
      x: 0,
      y: "a",
    },
  ]);
  db.close();
  t.pass();
});

View File

@ -14,43 +14,38 @@
permissions and limitations under the License.
*/
import { AsyncCondition, TransactionLevel } from "./backend-common.js";
import {
Backend,
ConnectResult,
DatabaseConnection,
DatabaseTransaction,
Schema,
RecordStoreRequest,
IndexProperties,
RecordGetRequest,
IndexGetQuery,
IndexMeta,
ObjectStoreGetQuery,
ObjectStoreMeta,
RecordGetResponse,
RecordStoreRequest,
RecordStoreResponse,
ResultLevel,
StoreLevel,
RecordStoreResponse,
} from "./backend-interface.js";
import { BridgeIDBKeyRange } from "./bridge-idb.js";
import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js";
import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js";
import { compareKeys } from "./util/cmp.js";
import { ConstraintError, DataError } from "./util/errors.js";
import { getIndexKeys } from "./util/getIndexKeys.js";
import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
import {
structuredClone,
structuredEncapsulate,
structuredRevive,
} from "./util/structuredClone.js";
import { ConstraintError, DataError } from "./util/errors.js";
import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js";
import { compareKeys } from "./util/cmp.js";
import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
import { getIndexKeys } from "./util/getIndexKeys.js";
import { openPromise } from "./util/openPromise.js";
import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js";
import { BridgeIDBKeyRange } from "./bridge-idb.js";
type Key = IDBValidKey;
type Value = unknown;
enum TransactionLevel {
None = 0,
Read = 1,
Write = 2,
VersionChange = 3,
}
interface ObjectStore {
originalName: string;
modifiedName: string | undefined;
@ -95,24 +90,39 @@ interface Database {
connectionCookies: string[];
}
/**
 * Serialized dump of one object store, including all records.
 * @public
 */
export interface ObjectStoreDump {
  name: string;
  // Current value of the store's key generator.
  keyGenerator: number;
  records: ObjectStoreRecord[];
}
/**
 * Serialized dump of a whole database: its schema plus every object
 * store's contents, keyed by store name.
 * @public
 */
export interface DatabaseDump {
  schema: Schema;
  objectStores: { [name: string]: ObjectStoreDump };
}
/**
 * Full dump of the in-memory backend: all databases keyed by name.
 * @public
 */
export interface MemoryBackendDump {
  databases: { [name: string]: DatabaseDump };
}
/**
 * Schema-level description of one object store.
 */
export interface ObjectStoreProperties {
  // Key path of the store, or null when keys are supplied explicitly.
  keyPath: string | string[] | null;
  // Whether the store uses a key generator for auto-assigned keys.
  autoIncrement: boolean;
  // Properties of each index, keyed by index name.
  // (Fixed index-signature parameter typo: "nameame" -> "name".)
  indexes: { [name: string]: IndexProperties };
}
/**
 * Schema-level description of one index on an object store.
 */
export interface IndexProperties {
  keyPath: string | string[];
  // Whether an array-valued key produces one index entry per element.
  multiEntry: boolean;
  // Whether index keys must be unique across records.
  unique: boolean;
}
/**
 * Full schema of a database: name, version, and all object stores.
 */
export interface Schema {
  databaseName: string;
  databaseVersion: number;
  objectStores: { [name: string]: ObjectStoreProperties };
}
interface ObjectStoreMapEntry {
store: ObjectStore;
indexMap: { [currentName: string]: Index };
@ -142,27 +152,6 @@ export interface ObjectStoreRecord {
value: Value;
}
/**
 * Condition variable for async code: tasks block on wait() until some
 * other task calls trigger(), which wakes all current waiters.
 */
class AsyncCondition {
  // Promise all current waiters are parked on.
  _waitPromise: Promise<void>;
  // Resolver that releases the current batch of waiters.
  _resolveWaitPromise: () => void;
  constructor() {
    const op = openPromise<void>();
    this._waitPromise = op.promise;
    this._resolveWaitPromise = op.resolve;
  }
  /** Block until the next trigger() call. */
  wait(): Promise<void> {
    return this._waitPromise;
  }
  /** Wake all current waiters and rearm for the next round. */
  trigger(): void {
    this._resolveWaitPromise();
    // Install a fresh promise so subsequent wait() calls block again.
    const op = openPromise<void>();
    this._waitPromise = op.promise;
    this._resolveWaitPromise = op.resolve;
  }
}
function nextStoreKey<T>(
forward: boolean,
data: ISortedMapF<Key, ObjectStoreRecord>,
@ -178,12 +167,6 @@ function nextStoreKey<T>(
return res[1].primaryKey;
}
/**
 * Throw when an internal invariant does not hold. Declared as a
 * TypeScript assertion function so the compiler narrows `cond` to
 * true after a successful call.
 */
function assertInvariant(cond: boolean): asserts cond {
  if (cond) {
    return;
  }
  throw Error("invariant failed");
}
function nextKey(
forward: boolean,
tree: ISortedSetF<IDBValidKey>,
@ -230,6 +213,7 @@ function furthestKey(
}
export interface AccessStats {
primitiveStatements: number;
writeTransactions: number;
readTransactions: number;
writesPerStore: Record<string, number>;
@ -279,6 +263,7 @@ export class MemoryBackend implements Backend {
trackStats: boolean = true;
accessStats: AccessStats = {
primitiveStatements: 0,
readTransactions: 0,
writeTransactions: 0,
readsPerStore: {},
@ -459,7 +444,7 @@ export class MemoryBackend implements Backend {
delete this.databases[name];
}
async connectDatabase(name: string): Promise<DatabaseConnection> {
async connectDatabase(name: string): Promise<ConnectResult> {
if (this.enableTracing) {
console.log(`TRACING: connectDatabase(${name})`);
}
@ -498,7 +483,11 @@ export class MemoryBackend implements Backend {
this.connections[connectionCookie] = myConn;
return { connectionCookie };
return {
conn: { connectionCookie },
version: database.committedSchema.databaseVersion,
objectStores: Object.keys(database.committedSchema.objectStores).sort(),
};
}
async beginTransaction(
@ -601,14 +590,6 @@ export class MemoryBackend implements Backend {
this.disconnectCond.trigger();
}
private requireConnection(dbConn: DatabaseConnection): Connection {
const myConn = this.connections[dbConn.connectionCookie];
if (!myConn) {
throw Error(`unknown connection (${dbConn.connectionCookie})`);
}
return myConn;
}
private requireConnectionFromTransaction(
btx: DatabaseTransaction,
): Connection {
@ -619,36 +600,6 @@ export class MemoryBackend implements Backend {
return myConn;
}
getSchema(dbConn: DatabaseConnection): Schema {
if (this.enableTracing) {
console.log(`TRACING: getSchema`);
}
const myConn = this.requireConnection(dbConn);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
return db.committedSchema;
}
getCurrentTransactionSchema(btx: DatabaseTransaction): Schema {
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
return myConn.modifiedSchema;
}
getInitialTransactionSchema(btx: DatabaseTransaction): Schema {
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
return db.committedSchema;
}
renameIndex(
btx: DatabaseTransaction,
objectStoreName: string,
@ -799,7 +750,7 @@ export class MemoryBackend implements Backend {
createObjectStore(
btx: DatabaseTransaction,
name: string,
keyPath: string[] | null,
keyPath: string | string[] | null,
autoIncrement: boolean,
): void {
if (this.enableTracing) {
@ -842,7 +793,7 @@ export class MemoryBackend implements Backend {
btx: DatabaseTransaction,
indexName: string,
objectStoreName: string,
keyPath: string[],
keyPath: string | string[],
multiEntry: boolean,
unique: boolean,
): void {
@ -1102,12 +1053,91 @@ export class MemoryBackend implements Backend {
}
}
async getRecords(
async getObjectStoreRecords(
btx: DatabaseTransaction,
req: RecordGetRequest,
req: ObjectStoreGetQuery,
): Promise<RecordGetResponse> {
if (this.enableTracing) {
console.log(`TRACING: getRecords`);
console.log(`TRACING: getObjectStoreRecords`);
console.log("query", req);
}
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
if (db.txLevel < TransactionLevel.Read) {
throw Error("only allowed while running a transaction");
}
if (
db.txRestrictObjectStores &&
!db.txRestrictObjectStores.includes(req.objectStoreName)
) {
throw Error(
`Not allowed to access store '${
req.objectStoreName
}', transaction is over ${JSON.stringify(db.txRestrictObjectStores)}`,
);
}
const objectStoreMapEntry = myConn.objectStoreMap[req.objectStoreName];
if (!objectStoreMapEntry) {
throw Error("object store not found");
}
let range;
if (req.range == null) {
range = new BridgeIDBKeyRange(undefined, undefined, true, true);
} else {
range = req.range;
}
if (typeof range !== "object") {
throw Error(
"getObjectStoreRecords was given an invalid range (sanity check failed, not an object)",
);
}
if (!("lowerOpen" in range)) {
throw Error(
"getObjectStoreRecords was given an invalid range (sanity check failed, lowerOpen missing)",
);
}
const forward: boolean =
req.direction === "next" || req.direction === "nextunique";
const storeData =
objectStoreMapEntry.store.modifiedData ||
objectStoreMapEntry.store.originalData;
const resp = getObjectStoreRecords({
forward,
storeData,
limit: req.limit,
range,
resultLevel: req.resultLevel,
advancePrimaryKey: req.advancePrimaryKey,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}`;
this.accessStats.readsPerStore[k] =
(this.accessStats.readsPerStore[k] ?? 0) + 1;
this.accessStats.readItemsPerStore[k] =
(this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
}
if (this.enableTracing) {
console.log(`TRACING: getRecords got ${resp.count} results`);
}
return resp;
}
async getIndexRecords(
btx: DatabaseTransaction,
req: IndexGetQuery,
): Promise<RecordGetResponse> {
if (this.enableTracing) {
console.log(`TRACING: getIndexRecords`);
console.log("query", req);
}
const myConn = this.requireConnectionFromTransaction(btx);
@ -1161,15 +1191,10 @@ export class MemoryBackend implements Backend {
objectStoreMapEntry.store.modifiedData ||
objectStoreMapEntry.store.originalData;
const haveIndex = req.indexName !== undefined;
let resp: RecordGetResponse;
if (haveIndex) {
const index =
myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!];
const indexData = index.modifiedData || index.originalData;
resp = getIndexRecords({
const resp = getIndexRecords({
forward,
indexData,
storeData,
@ -1189,30 +1214,8 @@ export class MemoryBackend implements Backend {
this.accessStats.readItemsPerIndex[k] =
(this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count;
}
} else {
if (req.advanceIndexKey !== undefined) {
throw Error("unsupported request");
}
resp = getObjectStoreRecords({
forward,
storeData,
limit: req.limit,
range,
resultLevel: req.resultLevel,
advancePrimaryKey: req.advancePrimaryKey,
lastIndexPosition: req.lastIndexPosition,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}`;
this.accessStats.readsPerStore[k] =
(this.accessStats.readsPerStore[k] ?? 0) + 1;
this.accessStats.readItemsPerStore[k] =
(this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
}
}
if (this.enableTracing) {
console.log(`TRACING: getRecords got ${resp.count} results`);
console.log(`TRACING: getIndexRecords got ${resp.count} results`);
}
return resp;
}
@ -1294,13 +1297,13 @@ export class MemoryBackend implements Backend {
let storeKeyResult: StoreKeyResult;
try {
storeKeyResult = makeStoreKeyValue(
storeReq.value,
storeReq.key,
keygen,
autoIncrement,
keyPath,
);
storeKeyResult = makeStoreKeyValue({
value: storeReq.value,
key: storeReq.key,
currentKeyGenerator: keygen,
autoIncrement: autoIncrement,
keyPath: keyPath,
});
} catch (e) {
if (e instanceof DataError) {
const kp = JSON.stringify(keyPath);
@ -1445,7 +1448,7 @@ export class MemoryBackend implements Backend {
}
}
async rollback(btx: DatabaseTransaction): Promise<void> {
rollback(btx: DatabaseTransaction): void {
if (this.enableTracing) {
console.log(`TRACING: rollback`);
}
@ -1536,6 +1539,57 @@ export class MemoryBackend implements Backend {
await this.afterCommitCallback();
}
}
  /**
   * Look up metadata (key path, auto-increment flag, sorted index
   * names) for an object store in the connection's current schema
   * view. Returns undefined when the store does not exist.
   */
  getObjectStoreMeta(
    dbConn: DatabaseConnection,
    objectStoreName: string,
  ): ObjectStoreMeta | undefined {
    const conn = this.connections[dbConn.connectionCookie];
    if (!conn) {
      throw Error("db connection not found");
    }
    let schema = conn.modifiedSchema;
    if (!schema) {
      throw Error();
    }
    const storeInfo = schema.objectStores[objectStoreName];
    if (!storeInfo) {
      return undefined;
    }
    return {
      autoIncrement: storeInfo.autoIncrement,
      // Sorted for deterministic ordering.
      indexSet: Object.keys(storeInfo.indexes).sort(),
      // Clone so callers cannot mutate the schema through the result.
      keyPath: structuredClone(storeInfo.keyPath),
    };
  }
  /**
   * Look up metadata (key path, multiEntry, unique) for an index in
   * the connection's current schema view. Returns undefined when the
   * store or the index does not exist.
   */
  getIndexMeta(
    dbConn: DatabaseConnection,
    objectStoreName: string,
    indexName: string,
  ): IndexMeta | undefined {
    const conn = this.connections[dbConn.connectionCookie];
    if (!conn) {
      throw Error("db connection not found");
    }
    let schema = conn.modifiedSchema;
    if (!schema) {
      throw Error();
    }
    const storeInfo = schema.objectStores[objectStoreName];
    if (!storeInfo) {
      return undefined;
    }
    const indexInfo = storeInfo.indexes[indexName];
    if (!indexInfo) {
      return;
    }
    return {
      // Clone so callers cannot mutate the schema through the result.
      keyPath: structuredClone(indexInfo.keyPath),
      multiEntry: indexInfo.multiEntry,
      unique: indexInfo.unique,
    };
  }
}
function getIndexRecords(req: {
@ -1734,7 +1788,6 @@ function getIndexRecords(req: {
function getObjectStoreRecords(req: {
storeData: ISortedMapF<IDBValidKey, ObjectStoreRecord>;
lastIndexPosition?: IDBValidKey;
forward: boolean;
range: IDBKeyRange;
lastObjectStorePosition?: IDBValidKey;
@ -1743,7 +1796,6 @@ function getObjectStoreRecords(req: {
resultLevel: ResultLevel;
}): RecordGetResponse {
let numResults = 0;
const indexKeys: Key[] = [];
const primaryKeys: Key[] = [];
const values: Value[] = [];
const { storeData, range, forward } = req;
@ -1751,8 +1803,7 @@ function getObjectStoreRecords(req: {
function packResult(): RecordGetResponse {
return {
count: numResults,
indexKeys:
req.resultLevel >= ResultLevel.OnlyKeys ? indexKeys : undefined,
indexKeys: undefined,
primaryKeys:
req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
values: req.resultLevel >= ResultLevel.Full ? values : undefined,
@ -1762,8 +1813,8 @@ function getObjectStoreRecords(req: {
const rangeStart = forward ? range.lower : range.upper;
const dataStart = forward ? storeData.minKey() : storeData.maxKey();
let storePos = req.lastObjectStorePosition;
storePos = furthestKey(forward, storePos, rangeStart);
storePos = furthestKey(forward, storePos, dataStart);
storePos = furthestKey(forward, storePos, rangeStart);
storePos = furthestKey(forward, storePos, req.advancePrimaryKey);
if (storePos != null) {

View File

@ -0,0 +1,83 @@
/*
Copyright 2019 Florian Dold
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
import test from "ava";
import { createSqliteBackend } from "./SqliteBackend.js";
import { ResultLevel, StoreLevel } from "./backend-interface.js";
import { BridgeIDBKeyRange } from "./bridge-idb.js";
import * as fs from "node:fs";
import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
// Smoke test for the sqlite3 backend: create a schema inside a
// version-change transaction, store one auto-keyed record, read it
// back through the store and through an index, then commit.
test("sqlite3 backend", async (t) => {
  const filename = "mytestdb.sqlite3";
  // Start from a fresh database file.
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    // Do nothing.
  }
  try {
    const sqlite3Impl = await createNodeSqlite3Impl();
    const backend = await createSqliteBackend(sqlite3Impl, {
      filename,
    });
    const dbConnRes = await backend.connectDatabase("mydb");
    const dbConn = dbConnRes.conn;
    const tx = await backend.enterVersionChange(dbConn, 1);
    // "books" store keyed by "isbn" with auto-increment enabled.
    backend.createObjectStore(tx, "books", "isbn", true);
    backend.createIndex(tx, "byName", "books", "name", false, false);
    // No explicit key: the key generator assigns 1.
    await backend.storeRecord(tx, {
      objectStoreName: "books",
      storeLevel: StoreLevel.AllowOverwrite,
      value: { name: "foo" },
      key: undefined,
    });
    const res = await backend.getObjectStoreRecords(tx, {
      direction: "next",
      limit: 1,
      objectStoreName: "books",
      resultLevel: ResultLevel.Full,
      range: BridgeIDBKeyRange.only(1),
    });
    t.deepEqual(res.count, 1);
    t.deepEqual(res.primaryKeys![0], 1);
    t.deepEqual(res.values![0].name, "foo");
    const indexRes = await backend.getIndexRecords(tx, {
      direction: "next",
      limit: 1,
      objectStoreName: "books",
      indexName: "byName",
      resultLevel: ResultLevel.Full,
      range: BridgeIDBKeyRange.only("foo"),
    });
    t.deepEqual(indexRes.count, 1);
    // The generated key must have been injected into the value.
    t.deepEqual(indexRes.values![0].isbn, 1);
    t.deepEqual(indexRes.values![0].name, "foo");
    await backend.commit(tx);
    // A plain readwrite transaction also commits cleanly.
    const tx2 = await backend.beginTransaction(dbConn, ["books"], "readwrite");
    await backend.commit(tx2);
    await backend.close(dbConn);
    t.pass();
  } catch (e: any) {
    // Log before rethrowing so ava output shows the failure cause.
    console.log(e);
    throw e;
  }
});

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,29 @@
import { openPromise } from "./util/openPromise.js";
/**
 * Condition variable for async code: tasks block on wait() until some
 * other task calls trigger(), which wakes all current waiters.
 */
export class AsyncCondition {
  // Promise all current waiters are parked on.
  _waitPromise: Promise<void>;
  // Resolver that releases the current batch of waiters.
  _resolveWaitPromise: () => void;
  constructor() {
    const op = openPromise<void>();
    this._waitPromise = op.promise;
    this._resolveWaitPromise = op.resolve;
  }
  /** Block until the next trigger() call. */
  wait(): Promise<void> {
    return this._waitPromise;
  }
  /** Wake all current waiters and rearm for the next round. */
  trigger(): void {
    this._resolveWaitPromise();
    // Install a fresh promise so subsequent wait() calls block again.
    const op = openPromise<void>();
    this._waitPromise = op.promise;
    this._resolveWaitPromise = op.resolve;
  }
}
/**
 * Lock level currently held on a database. The numeric values are
 * ordered by strength, so callers may compare with < / >= (e.g.
 * "txLevel < TransactionLevel.Read"). Do not reorder.
 */
export enum TransactionLevel {
  None = 0,
  Read = 1,
  Write = 2,
  VersionChange = 3,
}

View File

@ -21,66 +21,45 @@ import {
IDBValidKey,
} from "./idbtypes.js";
/** @public */
export interface ObjectStoreProperties {
keyPath: string[] | null;
autoIncrement: boolean;
indexes: { [nameame: string]: IndexProperties };
/**
 * Result of connecting to a database: the connection handle plus a
 * snapshot of the committed version and object store names.
 */
export interface ConnectResult {
  conn: DatabaseConnection;
  // Committed database version at connect time.
  version: number;
  // Sorted names of the object stores in the committed schema.
  objectStores: string[];
}
/** @public */
export interface IndexProperties {
keyPath: string[];
multiEntry: boolean;
unique: boolean;
}
/** @public */
export interface Schema {
databaseName: string;
databaseVersion: number;
objectStores: { [name: string]: ObjectStoreProperties };
}
/**
 * Opaque handle to an open backend connection, identified by a
 * backend-issued cookie.
 * @public
 */
export interface DatabaseConnection {
  connectionCookie: string;
}
/**
 * Opaque handle to a running backend transaction, identified by a
 * backend-issued cookie.
 * @public
 */
export interface DatabaseTransaction {
  transactionCookie: string;
}
/**
 * How much detail a record query should return. Ordered by amount of
 * data; callers compare with >= (e.g. resultLevel >= OnlyKeys), so do
 * not reorder the members.
 * @public
 */
export enum ResultLevel {
  OnlyCount,
  OnlyKeys,
  Full,
}
/**
 * Overwrite policy for storing a record: fail on existing key, allow
 * overwrite, or require that the key already exists.
 * @public
 */
export enum StoreLevel {
  NoOverwrite,
  AllowOverwrite,
  UpdateExisting,
}
/** @public */
export interface RecordGetRequest {
export interface IndexGetQuery {
direction: IDBCursorDirection;
objectStoreName: string;
indexName: string | undefined;
indexName: string;
/**
* The range of keys to return.
* If indexName is defined, the range refers to the index keys.
* Otherwise it refers to the object store keys.
* The range refers to the index keys.
*/
range: BridgeIDBKeyRange | undefined | null;
/**
* Last cursor position in terms of the index key.
* Can only be specified if indexName is defined and
* lastObjectStorePosition is defined.
* Can only be specified if lastObjectStorePosition is defined.
*
* Must either be undefined or within range.
*/
@ -92,8 +71,6 @@ export interface RecordGetRequest {
/**
* If specified, the index key of the results must be
* greater or equal to advanceIndexKey.
*
* Only applicable if indexName is specified.
*/
advanceIndexKey?: IDBValidKey;
/**
@ -109,7 +86,31 @@ export interface RecordGetRequest {
resultLevel: ResultLevel;
}
/** @public */
export interface ObjectStoreGetQuery {
direction: IDBCursorDirection;
objectStoreName: string;
/**
* The range of keys to return.
* Refers to the object store keys.
*/
range: BridgeIDBKeyRange | undefined | null;
/**
* Last position in terms of the object store key.
*/
lastObjectStorePosition?: IDBValidKey;
/**
* If specified, the primary key of the results must be greater
* or equal to advancePrimaryKey.
*/
advancePrimaryKey?: IDBValidKey;
/**
* Maximum number of results to return.
* If 0, return all available results
*/
limit: number;
resultLevel: ResultLevel;
}
export interface RecordGetResponse {
values: any[] | undefined;
indexKeys: IDBValidKey[] | undefined;
@ -117,7 +118,6 @@ export interface RecordGetResponse {
count: number;
}
/** @public */
export interface RecordStoreRequest {
objectStoreName: string;
value: any;
@ -125,7 +125,6 @@ export interface RecordStoreRequest {
storeLevel: StoreLevel;
}
/** @public */
export interface RecordStoreResponse {
/**
* Key that the record was stored under in the object store.
@ -133,38 +132,79 @@ export interface RecordStoreResponse {
key: IDBValidKey;
}
/** @public */
export interface ObjectStoreMeta {
indexSet: string[];
keyPath: string | string[] | null;
autoIncrement: boolean;
}
export interface IndexMeta {
keyPath: string | string[];
multiEntry: boolean;
unique: boolean;
}
// FIXME: Instead of refering to an object store by name,
// maybe refer to it via some internal, numeric ID?
// This would simplify renaming.
export interface Backend {
getDatabases(): Promise<BridgeIDBDatabaseInfo[]>;
connectDatabase(name: string): Promise<DatabaseConnection>;
connectDatabase(name: string): Promise<ConnectResult>;
beginTransaction(
conn: DatabaseConnection,
dbConn: DatabaseConnection,
objectStores: string[],
mode: IDBTransactionMode,
): Promise<DatabaseTransaction>;
enterVersionChange(
conn: DatabaseConnection,
dbConn: DatabaseConnection,
newVersion: number,
): Promise<DatabaseTransaction>;
deleteDatabase(name: string): Promise<void>;
close(db: DatabaseConnection): Promise<void>;
close(dbConn: DatabaseConnection): Promise<void>;
getSchema(db: DatabaseConnection): Schema;
// FIXME: Use this for connection
// prepareConnect() - acquires a lock, maybe enters a version change transaction?
// finishConnect() - after possible versionchange is done, allow others to connect
getCurrentTransactionSchema(btx: DatabaseTransaction): Schema;
/**
* Get metadata for an object store.
*
* When dbConn is running a version change transaction,
* the current schema (and not the initial schema) is returned.
*
* Caller may mutate the result, a new object
* is returned on each call.
*/
getObjectStoreMeta(
dbConn: DatabaseConnection,
objectStoreName: string,
): ObjectStoreMeta | undefined;
getInitialTransactionSchema(btx: DatabaseTransaction): Schema;
/**
* Get metadata for an index.
*
* When dbConn is running a version change transaction,
* the current schema (and not the initial schema) is returned.
*
* Caller may mutate the result, a new object
* is returned on each call.
*/
getIndexMeta(
dbConn: DatabaseConnection,
objectStoreName: string,
indexName: string,
): IndexMeta | undefined;
renameIndex(
btx: DatabaseTransaction,
objectStoreName: string,
oldName: string,
newName: string,
oldIndexName: string,
newIndexName: string,
): void;
deleteIndex(
@ -173,8 +213,9 @@ export interface Backend {
indexName: string,
): void;
rollback(btx: DatabaseTransaction): Promise<void>;
rollback(btx: DatabaseTransaction): void;
// FIXME: Should probably not be async
commit(btx: DatabaseTransaction): Promise<void>;
deleteObjectStore(btx: DatabaseTransaction, name: string): void;
@ -207,9 +248,14 @@ export interface Backend {
range: BridgeIDBKeyRange,
): Promise<void>;
getRecords(
getObjectStoreRecords(
btx: DatabaseTransaction,
req: RecordGetRequest,
req: ObjectStoreGetQuery,
): Promise<RecordGetResponse>;
getIndexRecords(
btx: DatabaseTransaction,
req: IndexGetQuery,
): Promise<RecordGetResponse>;
storeRecord(

View File

@ -0,0 +1,740 @@
/*
Copyright 2019 Florian Dold
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
/**
* Tests that are backend-generic.
* See testingdb.ts for the backend selection in test runs.
*/
/**
* Imports.
*/
import test from "ava";
import {
BridgeIDBCursorWithValue,
BridgeIDBDatabase,
BridgeIDBFactory,
BridgeIDBKeyRange,
BridgeIDBTransaction,
} from "./bridge-idb.js";
import {
IDBCursorDirection,
IDBCursorWithValue,
IDBDatabase,
IDBKeyRange,
IDBRequest,
IDBValidKey,
} from "./idbtypes.js";
import { initTestIndexedDB, useTestIndexedDb } from "./testingdb.js";
import { MemoryBackend } from "./MemoryBackend.js";
import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js";
// Initialize the IndexedDB implementation under test exactly once before
// any test runs; backend selection happens in testingdb.ts.
test.before("test DB initialization", initTestIndexedDB);
// Creates a database with one store and two indexes, populating it
// entirely inside the upgrade transaction.
test("Spec: Example 1 Part 1", async (t) => {
  const idb = useTestIndexedDb();
  // Unique name per run so repeated test executions don't collide.
  const dbName = "library-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result as BridgeIDBDatabase;
    const books = upgradeDb.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
    // Populate with initial data while still in the upgrade transaction.
    books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
    books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
    books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  };
  await promiseFromRequest(openReq);
  t.pass();
});
// Same schema as part 1, but records are inserted in a separate
// readwrite transaction after the open completes.
test("Spec: Example 1 Part 2", async (t) => {
  const idb = useTestIndexedDb();
  const dbName = "library-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result;
    const books = upgradeDb.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  t.is(db.name, dbName);
  const writeTx = db.transaction("books", "readwrite");
  writeTx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const books = writeTx.objectStore("books");
  books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(writeTx);
  t.pass();
});
// Overwriting a record with the same primary key but a changed index key
// must replace the old index entry instead of duplicating it.
test("duplicate index insertion", async (t) => {
  const idb = useTestIndexedDb();
  const dbName = "library-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result;
    const books = upgradeDb.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  t.is(db.name, dbName);
  const writeTx = db.transaction("books", "readwrite");
  writeTx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const books = writeTx.objectStore("books");
  books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  // Change the index key, keep primary key (isbn) the same.
  books.put({ title: "Water Buffaloes", author: "Bla", isbn: 234567 });
  books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(writeTx);
  // Iterating by_author must yield one entry per stored record; the
  // superseded "Bla" entry must be gone.
  const readTx = db.transaction(["books"], "readonly");
  const byAuthor = readTx.objectStore("books").index("by_author");
  const cursorReq = byAuthor.openCursor();
  const authors: string[] = [];
  await promiseFromRequest(cursorReq);
  for (;;) {
    const cursor: IDBCursorWithValue = cursorReq.result;
    if (cursor == null) {
      break;
    }
    authors.push(cursor.value.author);
    cursor.continue();
    await promiseFromRequest(cursorReq);
  }
  t.deepEqual(authors, ["Barney", "Fred", "Fred"]);
  t.pass();
});
// Iterates a non-unique index restricted to a single key and checks the
// records arrive in ascending primary-key order.
test("simple index iteration", async (t) => {
  const idb = useTestIndexedDb();
  const dbName = "library-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result;
    const books = upgradeDb.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  const writeTx = db.transaction("books", "readwrite");
  const books = writeTx.objectStore("books");
  books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  await promiseFromTransaction(writeTx);
  const readTx = db.transaction(["books"], "readonly");
  const byAuthor = readTx.objectStore("books").index("by_author");
  const cursorReq = byAuthor.openCursor(BridgeIDBKeyRange.only("Fred"));
  await promiseFromRequest(cursorReq);
  // A single cursor reference is kept across continue() calls; the bridge
  // updates it in place, and value becomes undefined when iteration ends.
  const cursor = cursorReq.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(cursorReq);
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 234567);
  cursor.continue();
  await promiseFromRequest(cursorReq);
  t.is(cursor.value, undefined);
});
// Walks the spec's third example end to end: index get(), a range-limited
// index cursor, a plain object-store cursor, and the "next"/"nextunique"/
// "prevunique" cursor directions. The assertion sequence is strictly
// order-dependent.
test("Spec: Example 1 Part 3", async (t) => {
  const idb = useTestIndexedDb();
  // Unique name per run so repeated test executions don't collide.
  const dbname = "library-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, dbname);
  // Seed three records; two share the author "Fred".
  const tx = db.transaction("books", "readwrite");
  const store = tx.objectStore("books");
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  await promiseFromTransaction(tx);
  // Point lookup through the unique by_title index.
  const tx2 = db.transaction("books", "readonly");
  const store2 = tx2.objectStore("books");
  var index2 = store2.index("by_title");
  const request2 = index2.get("Bedrock Nights");
  const result2: any = await promiseFromRequest(request2);
  t.is(result2.author, "Barney");
  // Cursor over the non-unique by_author index, restricted to "Fred":
  // expect the two Fred records in ascending primary-key order.
  const tx3 = db.transaction(["books"], "readonly");
  const store3 = tx3.objectStore("books");
  const index3 = store3.index("by_author");
  const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
  await promiseFromRequest(request3);
  let cursor: BridgeIDBCursorWithValue | null;
  cursor = request3.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request3);
  cursor = request3.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 234567);
  await promiseFromTransaction(tx3);
  // Plain object-store cursor: records come back in ascending isbn order,
  // and the request's result is null once the cursor is exhausted.
  const tx4 = db.transaction("books", "readonly");
  const store4 = tx4.objectStore("books");
  const request4 = store4.openCursor();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 234567);
  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 345678);
  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  t.is(cursor, null);
  // Index cursor with explicit "next" direction: Barney, Fred, Fred.
  const tx5 = db.transaction("books", "readonly");
  const store5 = tx5.objectStore("books");
  const index5 = store5.index("by_author");
  const request5 = index5.openCursor(null, "next");
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");
  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  t.is(cursor, null);
  // "nextunique": one record per distinct index key; for the duplicated
  // "Fred" key the record with the lowest primary key (123456) is yielded.
  const request6 = index5.openCursor(null, "nextunique");
  await promiseFromRequest(request6);
  cursor = request6.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");
  cursor.continue();
  await promiseFromRequest(request6);
  cursor = request6.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request6);
  cursor = request6.result;
  t.is(cursor, null);
  console.log("---------------------------");
  // "prevunique": distinct index keys in descending order, but still the
  // record with the lowest primary key for each key (Fred -> 123456).
  const request7 = index5.openCursor(null, "prevunique");
  await promiseFromRequest(request7);
  cursor = request7.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request7);
  cursor = request7.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");
  cursor.continue();
  await promiseFromRequest(request7);
  cursor = request7.result;
  t.is(cursor, null);
  db.close();
  t.pass();
});
// Deleting one record must leave other records in the same store intact,
// all within a single readwrite transaction.
test("simple deletion", async (t) => {
  const idb = useTestIndexedDb();
  const dbName = "library-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result;
    const books = upgradeDb.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  const writeTx = db.transaction("books", "readwrite");
  writeTx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const books = writeTx.objectStore("books");
  books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(writeTx);
  const deleteTx = db.transaction("books", "readwrite");
  const booksRw = deleteTx.objectStore("books");
  // Sanity check before deleting anything.
  const getBefore = booksRw.get(234567);
  await promiseFromRequest(getBefore);
  t.is(getBefore.readyState, "done");
  t.is(getBefore.result.author, "Fred");
  booksRw.delete(123456);
  // The deleted key must no longer resolve ...
  const getDeleted = booksRw.get(123456);
  await promiseFromRequest(getDeleted);
  t.is(getDeleted.readyState, "done");
  t.is(getDeleted.result, undefined);
  // ... while the untouched record is still there.
  const getAfter = booksRw.get(234567);
  await promiseFromRequest(getAfter);
  t.is(getAfter.readyState, "done");
  t.is(getAfter.result.author, "Fred");
  await promiseFromTransaction(deleteTx);
  t.pass();
});
// Exercises MemoryBackend's dump/import round-trip, so it constructs its
// own backend instead of using the shared test factory.
test("export", async (t) => {
  const backend = new MemoryBackend();
  const idb = new BridgeIDBFactory(backend);
  const dbName = "library-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName, 42);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result;
    const books = upgradeDb.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  const writeTx = db.transaction("books", "readwrite");
  writeTx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const books = writeTx.objectStore("books");
  books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(writeTx);
  // Importing a dump into a fresh backend and re-exporting it must
  // reproduce the dump exactly, including the schema version.
  const dump = backend.exportDump();
  const backend2 = new MemoryBackend();
  backend2.importDump(dump);
  const dump2 = backend2.exportDump();
  t.assert(
    dump.databases[dbName].objectStores["books"].records.length === 3,
  );
  t.deepEqual(dump, dump2);
  t.is(dump.databases[dbName].schema.databaseVersion, 42);
  t.is(dump2.databases[dbName].schema.databaseVersion, 42);
  t.pass();
});
// Overwriting a record with a version that lacks an indexed property must
// remove the stale index entry.
test("update with non-existent index values", async (t) => {
  const idb = useTestIndexedDb();
  const dbName = "mydb-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result;
    const bla = upgradeDb.createObjectStore("bla", { keyPath: "x" });
    bla.createIndex("by_y", "y");
    bla.createIndex("by_z", "z");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  t.is(db.name, dbName);
  // Insert a record carrying z; it must be reachable via by_z.
  {
    const tx = db.transaction("bla", "readwrite");
    const bla = tx.objectStore("bla");
    bla.put({ x: 0, y: "a", z: 42 });
    const byZ = bla.index("by_z");
    const viaIndex = await promiseFromRequest(byZ.get(42));
    t.is(viaIndex.x, 0);
    const viaStore = await promiseFromRequest(bla.get(0));
    t.is(viaStore.z, 42);
    await promiseFromTransaction(tx);
  }
  // Overwrite the record without z.
  {
    const tx = db.transaction("bla", "readwrite");
    const bla = tx.objectStore("bla");
    bla.put({ x: 0, y: "a" });
    const viaStore = await promiseFromRequest(bla.get(0));
    t.is(viaStore.z, undefined);
    await promiseFromTransaction(tx);
  }
  // The old by_z entry must be gone now.
  {
    const tx = db.transaction("bla", "readwrite");
    const bla = tx.objectStore("bla");
    const byZ = bla.index("by_z");
    {
      const viaIndex = await promiseFromRequest(byZ.get(42));
      t.is(viaIndex, undefined);
    }
    const viaStore = await promiseFromRequest(bla.get(0));
    t.is(viaStore.z, undefined);
    await promiseFromTransaction(tx);
  }
  t.pass();
});
// Re-putting a record with an unchanged compound unique index key must not
// be treated as a uniqueness violation (the old entry is replaced).
test("delete from unique index", async (t) => {
  const idb = useTestIndexedDb();
  const dbName = "mydb-" + new Date().getTime() + Math.random();
  const openReq = idb.open(dbName);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result as IDBDatabase;
    const bla = upgradeDb.createObjectStore("bla", { keyPath: "x" });
    bla.createIndex("by_yz", ["y", "z"], {
      unique: true,
    });
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  t.is(db.name, dbName);
  {
    const tx = db.transaction("bla", "readwrite");
    const bla = tx.objectStore("bla");
    bla.put({ x: 0, y: "a", z: 42 });
    const byYz = bla.index("by_yz");
    const viaIndex = await promiseFromRequest(byYz.get(["a", 42]));
    t.is(viaIndex.x, 0);
    const viaStore = await promiseFromRequest(bla.get(0));
    t.is(viaStore.z, 42);
    await promiseFromTransaction(tx);
  }
  // Same primary key, same ["y","z"] index key, extra field: must succeed.
  {
    const tx = db.transaction("bla", "readwrite");
    const bla = tx.objectStore("bla");
    bla.put({ x: 0, y: "a", z: 42, extra: 123 });
    await promiseFromTransaction(tx);
  }
  t.pass();
});
// Exercises key-range and direction handling for both object-store and
// index cursors against a small fixed data set.
test("range queries", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = "mydb-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("bla", { keyPath: "x" });
    store.createIndex("by_y", "y");
    store.createIndex("by_z", "z");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  // Fixture: six records, two per index key "a"/"b"/"c" in by_y.
  const tx = db.transaction("bla", "readwrite");
  const store = tx.objectStore("bla");
  store.put({ x: 0, y: "a" });
  store.put({ x: 2, y: "a" });
  store.put({ x: 4, y: "b" });
  store.put({ x: 8, y: "b" });
  store.put({ x: 10, y: "c" });
  store.put({ x: 12, y: "c" });
  await promiseFromTransaction(tx);
  // Helper: collect every value yielded by an object-store cursor over
  // `range` in `direction` and compare the list against `expected`.
  async function doCursorStoreQuery(
    range: IDBKeyRange | IDBValidKey | undefined,
    direction: IDBCursorDirection | undefined,
    expected: any[],
  ): Promise<void> {
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const vals: any[] = [];
    const req = store.openCursor(range, direction);
    while (1) {
      await promiseFromRequest(req);
      const cursor: IDBCursorWithValue = req.result;
      if (!cursor) {
        break;
      }
      // NOTE(review): value is read after continue() is called; this
      // relies on the cursor still exposing the current record until the
      // continued request actually completes — confirm against the bridge
      // cursor implementation.
      cursor.continue();
      vals.push(cursor.value);
    }
    await promiseFromTransaction(tx);
    t.deepEqual(vals, expected);
  }
  // Helper: same collection loop, but iterating the by_y index.
  async function doCursorIndexQuery(
    range: IDBKeyRange | IDBValidKey | undefined,
    direction: IDBCursorDirection | undefined,
    expected: any[],
  ): Promise<void> {
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const index = store.index("by_y");
    const vals: any[] = [];
    const req = index.openCursor(range, direction);
    while (1) {
      await promiseFromRequest(req);
      const cursor: IDBCursorWithValue = req.result;
      if (!cursor) {
        break;
      }
      cursor.continue();
      vals.push(cursor.value);
    }
    await promiseFromTransaction(tx);
    t.deepEqual(vals, expected);
  }
  // Unbounded store scan in default ("next") order: all six records.
  await doCursorStoreQuery(undefined, undefined, [
    { x: 0, y: "a" },
    { x: 2, y: "a" },
    { x: 4, y: "b" },
    { x: 8, y: "b" },
    { x: 10, y: "c" },
    { x: 12, y: "c" },
  ]);
  // Open-open bound (0, 12): both endpoints excluded.
  await doCursorStoreQuery(
    BridgeIDBKeyRange.bound(0, 12, true, true),
    undefined,
    [
      { x: 2, y: "a" },
      { x: 4, y: "b" },
      { x: 8, y: "b" },
      { x: 10, y: "c" },
    ],
  );
  // Open-open index bound ("a", "c"): only the "b" records remain.
  await doCursorIndexQuery(
    BridgeIDBKeyRange.bound("a", "c", true, true),
    undefined,
    [
      { x: 4, y: "b" },
      { x: 8, y: "b" },
    ],
  );
  // "nextunique": one record per distinct index key, ascending; the record
  // with the lowest primary key represents each key.
  await doCursorIndexQuery(undefined, "nextunique", [
    { x: 0, y: "a" },
    { x: 4, y: "b" },
    { x: 10, y: "c" },
  ]);
  // "prevunique": distinct index keys in descending order, but still the
  // lowest-primary-key record for each key (e.g. x:10, not x:12, for "c").
  await doCursorIndexQuery(undefined, "prevunique", [
    { x: 10, y: "c" },
    { x: 4, y: "b" },
    { x: 0, y: "a" },
  ]);
  db.close();
  t.pass();
});

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,7 @@
import test from "ava";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test abort-in-initial-upgradeneeded.htm", async (t) => {
await new Promise<void>((resolve, reject) => {

View File

@ -1,5 +1,7 @@
import test from "ava";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// When db.close is called in upgradeneeded, the db is cleaned up on refresh
test("WPT test close-in-upgradeneeded.htm", (t) => {

View File

@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { IDBRequest } from "../idbtypes.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
const IDBKeyRange = BridgeIDBKeyRange;

View File

@ -2,10 +2,13 @@ import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js";
import {
indexeddb_test,
initTestIndexedDB,
is_transaction_active,
keep_alive,
} from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test abort-in-initial-upgradeneeded.htm (subtest 1)", async (t) => {
// Transactions are active during success handlers
await indexeddb_test(

View File

@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBCursor,BridgeIDBRequest } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test idbcursor_advance_index.htm", async (t) => {
await new Promise<void>((resolve, reject) => {

View File

@ -1,6 +1,9 @@
import test from "ava";
import { BridgeIDBCursor, BridgeIDBCursorWithValue } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
import { IDBDatabase } from "../idbtypes.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test idbcursor_continue_index.htm", (t) => {
return new Promise((resolve, reject) => {
@ -209,7 +212,7 @@ test("WPT idbcursor-continue-index4.htm", (t) => {
// IDBCursor.continue() - index - iterate using 'prevunique'
test("WPT idbcursor-continue-index5.htm", (t) => {
return new Promise((resolve, reject) => {
var db: any;
var db: IDBDatabase;
const records = [
{ pKey: "primaryKey_0", iKey: "indexKey_0" },
{ pKey: "primaryKey_1", iKey: "indexKey_1" },

View File

@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.continue() - object store - iterate to the next record
test("WPT test idbcursor_continue_objectstore.htm", (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava";
import { indexeddb_test } from "./wptsupport.js";
import { indexeddb_test, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT idbcursor-delete-exception-order.htm", async (t) => {
// 'IDBCursor.delete exception order: TransactionInactiveError vs. ReadOnlyError'

View File

@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js";
import { IDBCursor } from "../idbtypes.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.delete() - index - remove a record from the object store
test("WPT idbcursor-delete-index.htm", (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.delete() - object store - remove a record from the object store
test("WPT idbcursor-delete-objectstore.htm", (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT idbcursor-reused.htm", async (t) => {
await new Promise<void>((resolve, reject) => {

View File

@ -3,10 +3,13 @@ import { BridgeIDBCursor, BridgeIDBKeyRange } from "../bridge-idb.js";
import {
createDatabase,
createdb,
initTestIndexedDB,
promiseForRequest,
promiseForTransaction,
} from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.update() - index - modify a record in the object store
test("WPT test idbcursor_update_index.htm", (t) => {
return new Promise((resolve, reject) => {

View File

@ -1,8 +1,10 @@
import test from "ava";
import { idbFactory } from "./wptsupport.js";
import { initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT idbfactory-cmp*.html", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
var greater = indexedDB.cmp(2, 1);
var equal = indexedDB.cmp(2, 2);
var less = indexedDB.cmp(1, 2);

View File

@ -1,7 +1,10 @@
import test from "ava";
import { BridgeIDBVersionChangeEvent } from "../bridge-idb.js";
import FakeEvent from "../util/FakeEvent.js";
import { createdb, format_value, idbFactory } from "./wptsupport.js";
import { createdb, format_value, initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js";
import { IDBDatabase } from "../idbtypes.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBFactory.open() - request has no source
test("WPT idbfactory-open.htm", async (t) => {
@ -36,7 +39,7 @@ test("WPT idbfactory-open2.htm", async (t) => {
// IDBFactory.open() - no version opens current database
test("WPT idbfactory-open3.htm", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var did_upgrade = false;
@ -61,7 +64,6 @@ test("WPT idbfactory-open3.htm", async (t) => {
// IDBFactory.open() - new database has default version
test("WPT idbfactory-open4.htm", async (t) => {
const indexedDB = idbFactory;
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, t.title + "-database_name");
@ -78,7 +80,6 @@ test("WPT idbfactory-open4.htm", async (t) => {
// IDBFactory.open() - new database is empty
test("WPT idbfactory-open5.htm", async (t) => {
const indexedDB = idbFactory;
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, t.title + "-database_name");
@ -97,7 +98,7 @@ test("WPT idbfactory-open5.htm", async (t) => {
// IDBFactory.open() - open database with a lower version than current
test("WPT idbfactory-open6.htm", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var open_rq2: any;
@ -131,7 +132,7 @@ test("WPT idbfactory-open6.htm", async (t) => {
// IDBFactory.open() - open database with a higher version than current
test("WPT idbfactory-open7.htm", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var did_upgrade = false;
@ -169,7 +170,7 @@ test("WPT idbfactory-open7.htm", async (t) => {
// IDBFactory.open() - error in version change transaction aborts open
test("WPT idbfactory-open8.htm", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13);
var did_upgrade = false;
@ -193,7 +194,7 @@ test("WPT idbfactory-open8.htm", async (t) => {
// IDBFactory.open() - errors in version argument
test("WPT idbfactory-open9.htm", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
function should_throw(val: any, name?: string) {
if (!name) {
name = typeof val == "object" && val ? "object" : format_value(val);
@ -281,9 +282,9 @@ test("WPT idbfactory-open9.htm", async (t) => {
// IDBFactory.open() - error in version change transaction aborts open
test("WPT idbfactory-open10.htm", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var db: any, db2: any;
var db: IDBDatabase, db2: IDBDatabase;
var open_rq = createdb(t, undefined, 9);
open_rq.onupgradeneeded = function (e: any) {
@ -350,7 +351,7 @@ test("WPT idbfactory-open10.htm", async (t) => {
var open_rq3 = indexedDB.open(db.name);
open_rq3.onsuccess = function (e: any) {
var db3 = e.target.result;
var db3: IDBDatabase = e.target.result;
t.true(
db3.objectStoreNames.contains("store"),
@ -407,7 +408,7 @@ test("WPT idbfactory-open10.htm", async (t) => {
// IDBFactory.open() - second open's transaction is available to get objectStores
test("WPT idbfactory-open11.htm", async (t) => {
const indexedDB = idbFactory;
const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => {
var db: any;
var count_done = 0;
@ -472,8 +473,6 @@ test("WPT idbfactory-open11.htm", async (t) => {
// IDBFactory.open() - upgradeneeded gets VersionChangeEvent
test("WPT idbfactory-open12.htm", async (t) => {
const indexedDB = idbFactory;
var db: any;
var open_rq = createdb(t, undefined, 9);

View File

@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBIndex.get() - returns the record
test("WPT idbindex_get.htm", async (t) => {
@ -93,7 +95,7 @@ test("WPT idbindex_get3.htm", async (t) => {
// IDBIndex.get() - returns the record with the first key in the range
test("WPT idbindex_get4.htm", async (t) => {
await new Promise<void>((resolve, reject) => {
var db: any;
var db: IDBDatabase;
var open_rq = createdb(t);

View File

@ -1,5 +1,7 @@
import test from "ava";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBIndex.openCursor() - throw InvalidStateError when the index is deleted
test("WPT test idbindex-openCursor.htm", (t) => {

View File

@ -1,5 +1,7 @@
import test, { ExecutionContext } from "ava";
import { indexeddb_test } from "./wptsupport.js";
import { indexeddb_test, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
async function t1(t: ExecutionContext, method: string): Promise<void> {
await indexeddb_test(
@ -55,8 +57,6 @@ async function t2(t: ExecutionContext, method: string): Promise<void> {
done();
}, 0);
console.log(`queued task for ${method}`);
},
"t2",
);

View File

@ -1,7 +1,9 @@
import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.add() - add with an inline key
test("WPT idbobjectstore_add.htm", async (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.get() - key is a number
test("WPT idbobjectstore_get.htm", (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.put() - put with an inline key
test("WPT idbobjectstore_put.htm", (t) => {

View File

@ -6,9 +6,12 @@ import {
createBooksStore,
createDatabase,
createNotBooksStore,
initTestIndexedDB,
migrateDatabase,
} from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IndexedDB: object store renaming support
// IndexedDB object store rename in new transaction
test("WPT idbobjectstore-rename-store.html (subtest 1)", async (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBTransaction - complete event
test("WPT idbtransaction-oncomplete.htm", async (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava";
import { assert_key_equals, createdb } from "./wptsupport.js";
import { assert_key_equals, createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test keypath.htm", async (t) => {
function keypath(
@ -9,8 +11,6 @@ test("WPT test keypath.htm", async (t) => {
desc?: string,
) {
return new Promise<void>((resolve, reject) => {
console.log("key path", keypath);
console.log("checking", desc);
let db: any;
const store_name = "store-" + Date.now() + Math.random();

View File

@ -1,6 +1,8 @@
import test from "ava";
import { EventTarget } from "../idbtypes.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// Bubbling and capturing of request events
test("WPT request_bubble-and-capture.htm", async (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// Transactions have a request queue
test("transaction-requestqueue.htm", async (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava";
import { IDBVersionChangeEvent } from "../idbtypes.js";
import { createdb } from "./wptsupport.js";
import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test value.htm, array", (t) => {
return new Promise((resolve, reject) => {
@ -12,7 +14,6 @@ test("WPT test value.htm, array", (t) => {
createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) {
(e.target as any).result.createObjectStore("store").add(value, 1);
(e.target as any).onsuccess = (e: any) => {
console.log("in first onsuccess");
e.target.result
.transaction("store")
.objectStore("store")
@ -35,13 +36,10 @@ test("WPT test value.htm, date", (t) => {
createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) {
(e.target as any).result.createObjectStore("store").add(value, 1);
(e.target as any).onsuccess = (e: any) => {
console.log("in first onsuccess");
e.target.result
.transaction("store")
.objectStore("store")
.get(1).onsuccess = (e: any) => {
console.log("target", e.target);
console.log("result", e.target.result);
t.assert(e.target.result instanceof _instanceof, "instanceof");
resolve();
};

View File

@ -1,5 +1,5 @@
import { ExecutionContext } from "ava";
import { BridgeIDBFactory, BridgeIDBRequest } from "../bridge-idb.js";
import { BridgeIDBRequest } from "../bridge-idb.js";
import {
IDBDatabase,
IDBIndex,
@ -8,17 +8,10 @@ import {
IDBRequest,
IDBTransaction,
} from "../idbtypes.js";
import { MemoryBackend } from "../MemoryBackend.js";
import { initTestIndexedDB , useTestIndexedDb } from "../testingdb.js";
import { compareKeys } from "../util/cmp.js";
BridgeIDBFactory.enableTracing = true;
const backend = new MemoryBackend();
backend.enableTracing = true;
export const idbFactory = new BridgeIDBFactory(backend);
const self = {
indexedDB: idbFactory,
};
export { initTestIndexedDB, useTestIndexedDb } from "../testingdb.js"
export function createdb(
t: ExecutionContext<unknown>,
@ -27,8 +20,8 @@ export function createdb(
): IDBOpenDBRequest {
var rq_open: IDBOpenDBRequest;
dbname = dbname ? dbname : "testdb-" + new Date().getTime() + Math.random();
if (version) rq_open = self.indexedDB.open(dbname, version);
else rq_open = self.indexedDB.open(dbname);
if (version) rq_open = useTestIndexedDb().open(dbname, version);
else rq_open = useTestIndexedDb().open(dbname);
return rq_open;
}
@ -111,7 +104,7 @@ export async function migrateNamedDatabase(
migrationCallback: MigrationCallback,
): Promise<IDBDatabase> {
return new Promise<IDBDatabase>((resolve, reject) => {
const request = self.indexedDB.open(databaseName, newVersion);
const request = useTestIndexedDb().open(databaseName, newVersion);
request.onupgradeneeded = (event: any) => {
const database = event.target.result;
const transaction = event.target.transaction;
@ -175,7 +168,7 @@ export async function createDatabase(
setupCallback: MigrationCallback,
): Promise<IDBDatabase> {
const databaseName = makeDatabaseName(t.title);
const request = self.indexedDB.deleteDatabase(databaseName);
const request = useTestIndexedDb().deleteDatabase(databaseName);
return migrateNamedDatabase(t, databaseName, 1, setupCallback);
}
@ -463,9 +456,9 @@ export function indexeddb_test(
options = Object.assign({ upgrade_will_abort: false }, options);
const dbname =
"testdb-" + new Date().getTime() + Math.random() + (dbsuffix ?? "");
var del = self.indexedDB.deleteDatabase(dbname);
var del = useTestIndexedDb().deleteDatabase(dbname);
del.onerror = () => t.fail("deleteDatabase should succeed");
var open = self.indexedDB.open(dbname, 1);
var open = useTestIndexedDb().open(dbname, 1);
open.onupgradeneeded = function () {
var db = open.result;
t.teardown(function () {
@ -474,7 +467,7 @@ export function indexeddb_test(
e.preventDefault();
};
db.close();
self.indexedDB.deleteDatabase(db.name);
useTestIndexedDb().deleteDatabase(db.name);
});
var tx = open.transaction!;
upgrade_func(resolve, db, tx, open);

View File

@ -0,0 +1,26 @@
import { BridgeIDBTransaction } from "./bridge-idb.js";
import { IDBRequest } from "./idbtypes.js";
/**
 * Wrap an IndexedDB request in a promise that settles when the
 * request fires success (with `request.result`) or error (with
 * `request.error`).
 */
export function promiseFromRequest(request: IDBRequest): Promise<any> {
  return new Promise((resolve, reject) => {
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
}
/**
 * Wrap a bridge transaction in a promise that resolves on `complete`
 * and rejects (without a reason, as before) on `error`.
 */
export function promiseFromTransaction(
  transaction: BridgeIDBTransaction,
): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    transaction.oncomplete = () => resolve();
    transaction.onerror = () => reject();
  });
}

View File

@ -19,48 +19,27 @@ and limitations under the License.
* Instead of ambient types, we export type declarations.
*/
/**
* @public
*/
export type IDBKeyPath = string;
/**
* @public
*/
export interface EventListener {
(evt: Event): void;
}
/**
* @public
*/
export interface EventListenerObject {
handleEvent(evt: Event): void;
}
/**
* @public
*/
export interface EventListenerOptions {
capture?: boolean;
}
/**
* @public
*/
export interface AddEventListenerOptions extends EventListenerOptions {
once?: boolean;
passive?: boolean;
}
/**
* @public
*/
export type IDBTransactionMode = "readonly" | "readwrite" | "versionchange";
/**
* @public
*/
export type EventListenerOrEventListenerObject =
| EventListener
| EventListenerObject;
@ -68,8 +47,6 @@ export type EventListenerOrEventListenerObject =
/**
* EventTarget is a DOM interface implemented by objects that can receive
* events and may have listeners for them.
*
* @public
*/
export interface EventTarget {
/**

View File

@ -2,14 +2,10 @@ import {
Backend,
DatabaseConnection,
DatabaseTransaction,
IndexProperties,
ObjectStoreProperties,
RecordGetRequest,
RecordGetResponse,
RecordStoreRequest,
RecordStoreResponse,
ResultLevel,
Schema,
StoreLevel,
} from "./backend-interface.js";
import {
@ -36,6 +32,9 @@ import {
} from "./MemoryBackend.js";
import { Listener } from "./util/FakeEventTarget.js";
export * from "./SqliteBackend.js";
export * from "./sqlite3-interface.js";
export * from "./idbtypes.js";
export { MemoryBackend } from "./MemoryBackend.js";
export type { AccessStats } from "./MemoryBackend.js";
@ -55,21 +54,17 @@ export {
};
export type {
DatabaseTransaction,
RecordGetRequest,
RecordGetResponse,
Schema,
Backend,
DatabaseList,
RecordStoreRequest,
RecordStoreResponse,
DatabaseConnection,
ObjectStoreProperties,
RequestObj,
DatabaseDump,
ObjectStoreDump,
IndexRecord,
ObjectStoreRecord,
IndexProperties,
MemoryBackendDump,
Event,
Listener,

View File

@ -0,0 +1,84 @@
/*
This file is part of GNU Taler
(C) 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
// @ts-ignore: optional dependency
import type Database from "better-sqlite3";
import {
ResultRow,
Sqlite3Interface,
Sqlite3Statement,
} from "./sqlite3-interface.js";
/**
 * Create an implementation of the generic Sqlite3Interface backed by
 * the better-sqlite3 native bindings. better-sqlite3 is an optional
 * dependency, hence the dynamic import.
 */
export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
  // @ts-ignore: optional dependency
  const bsq = (await import("better-sqlite3")).default;
  return {
    open(filename: string) {
      const internalDbHandle = bsq(filename);
      return {
        internalDbHandle,
        close() {
          internalDbHandle.close();
        },
        prepare(stmtStr): Sqlite3Statement {
          const stmtHandle = internalDbHandle.prepare(stmtStr);
          return {
            internalStatement: stmtHandle,
            getAll(params): ResultRow[] {
              // better-sqlite3 doesn't accept an explicit `undefined`
              // argument, so only forward params when present.
              let res: ResultRow[];
              if (params === undefined) {
                res = stmtHandle.all() as ResultRow[];
              } else {
                res = stmtHandle.all(params) as ResultRow[];
              }
              return res;
            },
            getFirst(params): ResultRow | undefined {
              let res: ResultRow | undefined;
              if (params === undefined) {
                res = stmtHandle.get() as ResultRow | undefined;
              } else {
                res = stmtHandle.get(params) as ResultRow | undefined;
              }
              return res;
            },
            run(params) {
              // The better-sqlite3 library doesn't like it when we pass
              // undefined directly. (Removed a dead `myParams` array
              // that was built here but never used.)
              let res: Database.RunResult;
              if (params !== undefined) {
                res = stmtHandle.run(params);
              } else {
                res = stmtHandle.run();
              }
              return {
                lastInsertRowid: res.lastInsertRowid,
              };
            },
          };
        },
        exec(sqlStr): void {
          internalDbHandle.exec(sqlStr);
        },
      };
    },
  };
}

View File

@ -0,0 +1,34 @@
/** Handle to an open sqlite3 database. */
export type Sqlite3Database = {
  // Opaque handle of the concrete sqlite3 binding.
  internalDbHandle: any;
  // Execute one or more SQL statements, discarding results.
  exec(sqlStr: string): void;
  // Compile a statement for (repeated) execution.
  prepare(stmtStr: string): Sqlite3Statement;
  close(): void;
};
/** Prepared statement handle. */
export type Sqlite3Statement = {
  // Opaque statement object of the concrete binding.
  internalStatement: any;
  // Execute a statement that modifies the database.
  run(params?: BindParams): RunResult;
  // Execute a query and return all result rows.
  getAll(params?: BindParams): ResultRow[];
  // Execute a query and return only the first row, if any.
  getFirst(params?: BindParams): ResultRow | undefined;
};
/** Result info for a statement that modified the database. */
export interface RunResult {
  lastInsertRowid: number | bigint;
}
/** Value types sqlite3 can bind as parameters or return in rows. */
export type Sqlite3Value = string | Uint8Array | number | null | bigint;
/** Named bind parameters; entries may be undefined. */
export type BindParams = Record<string, Sqlite3Value | undefined>;
/** One result row, keyed by column name. */
export type ResultRow = Record<string, Sqlite3Value>;
/**
 * Common interface that multiple sqlite3 bindings
 * (such as better-sqlite3 or qtart's sqlite3 bindings)
 * can adapt to.
 *
 * This does not expose full sqlite3 functionality, but just enough
 * to be used by our IndexedDB sqlite3 backend.
 */
export interface Sqlite3Interface {
  open(filename: string): Sqlite3Database;
}

View File

@ -0,0 +1,43 @@
/*
Copyright 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
import { createSqliteBackend } from "./SqliteBackend.js";
import { BridgeIDBFactory } from "./bridge-idb.js";
import { IDBFactory } from "./idbtypes.js";
import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
// Module-level singleton; set once by initTestIndexedDB().
let idbFactory: IDBFactory | undefined = undefined;

/**
 * Initialize the IndexedDB factory used by the tests, backed by an
 * in-memory sqlite3 database. Intended as an ava `test.before` hook.
 */
export async function initTestIndexedDB(): Promise<void> {
  const sqlite3Impl = await createNodeSqlite3Impl();
  const backend = await createSqliteBackend(sqlite3Impl, {
    filename: ":memory:",
  });
  idbFactory = new BridgeIDBFactory(backend);
  backend.enableTracing = true;
  BridgeIDBFactory.enableTracing = false;
}
/**
 * Return the IndexedDB factory for tests, failing loudly if
 * initTestIndexedDB() has not run yet.
 */
export function useTestIndexedDb(): IDBFactory {
  if (idbFactory) {
    return idbFactory;
  }
  throw Error("indexeddb factory not initialized");
}

View File

@ -0,0 +1,103 @@
/*
Copyright 2017 Jeremy Scheff
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
import FakeEventTarget from "./FakeEventTarget.js";
import { Event, EventTarget } from "../idbtypes.js";
/** @public */
export type EventType =
| "abort"
| "blocked"
| "complete"
| "error"
| "success"
| "upgradeneeded"
| "versionchange";
/**
 * Minimal DOM-like Event implementation used by the bridge's fake
 * event machinery, independent of a real DOM.
 */
export class FakeDomEvent implements Event {
  public eventPath: FakeEventTarget[] = [];
  public type: EventType;

  // Event phase constants, mirroring the DOM Event interface.
  public readonly NONE = 0;
  public readonly CAPTURING_PHASE = 1;
  public readonly AT_TARGET = 2;
  public readonly BUBBLING_PHASE = 3;

  // Flags
  public propagationStopped = false;
  public immediatePropagationStopped = false;
  public canceled = false;
  public initialized = true;
  public dispatched = false;

  public target: FakeEventTarget | null = null;
  public currentTarget: FakeEventTarget | null = null;

  public eventPhase: 0 | 1 | 2 | 3 = 0;

  public defaultPrevented = false;

  public isTrusted = false;
  public timeStamp = Date.now();

  public bubbles: boolean;
  public cancelable: boolean;

  constructor(
    type: EventType,
    eventInitDict: { bubbles?: boolean; cancelable?: boolean } = {},
  ) {
    this.type = type;
    this.bubbles =
      eventInitDict.bubbles !== undefined ? eventInitDict.bubbles : false;
    this.cancelable =
      eventInitDict.cancelable !== undefined ? eventInitDict.cancelable : false;
  }

  cancelBubble: boolean = false;
  composed: boolean = false;
  returnValue: boolean = false;
  get srcElement(): EventTarget | null {
    return this.target;
  }
  composedPath(): EventTarget[] {
    throw new Error("Method not implemented.");
  }
  initEvent(
    type: string,
    bubbles?: boolean | undefined,
    cancelable?: boolean | undefined,
  ): void {
    throw new Error("Method not implemented.");
  }

  public preventDefault() {
    if (this.cancelable) {
      this.canceled = true;
      // Fix: also expose cancellation through the standard DOM flag;
      // previously `defaultPrevented` stayed false even after a
      // successful preventDefault().
      this.defaultPrevented = true;
    }
  }

  public stopPropagation() {
    this.propagationStopped = true;
  }

  public stopImmediatePropagation() {
    this.propagationStopped = true;
    this.immediatePropagationStopped = true;
  }
}

export default FakeDomEvent;

View File

@ -180,7 +180,7 @@ abstract class FakeEventTarget implements EventTarget {
fe.eventPath.reverse();
fe.eventPhase = event.BUBBLING_PHASE;
if (fe.eventPath.length === 0 && event.type === "error") {
console.error("Unhandled error event: ", event.target);
console.error("Unhandled error event on target: ", event.target);
}
for (const obj of event.eventPath) {
if (!event.propagationStopped) {

View File

@ -19,7 +19,11 @@ import { IDBKeyPath, IDBValidKey } from "../idbtypes.js";
import { valueToKey } from "./valueToKey.js";
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#dfn-steps-for-extracting-a-key-from-a-value-using-a-key-path
/**
* Algorithm to "extract a key from a value using a key path".
*/
export const extractKey = (keyPath: IDBKeyPath | IDBKeyPath[], value: any) => {
//console.log(`extracting key ${JSON.stringify(keyPath)} from ${JSON.stringify(value)}`);
if (Array.isArray(keyPath)) {
const result: IDBValidKey[] = [];

View File

@ -0,0 +1,39 @@
/*
This file is part of GNU Taler
(C) 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
import test, { ExecutionContext } from "ava";
import { deserializeKey, serializeKey } from "./key-storage.js";
import { IDBValidKey } from "../idbtypes.js";
/**
 * Assert that serializing and then deserializing `k` yields `k` again.
 */
function checkKeySer(t: ExecutionContext, k: IDBValidKey): void {
  const encoded = serializeKey(k);
  const decoded = deserializeKey(encoded);
  t.deepEqual(k, decoded);
}
// Round-trip checks: serializeKey followed by deserializeKey must be the
// identity for strings of various code-unit widths, numbers near the
// encoding-width boundaries, and nested arrays.
// NOTE(review): binary (ArrayBuffer/typed-array) and Date keys are not
// covered here — consider adding cases for them.
test("basics", (t) => {
  checkKeySer(t, "foo");
  checkKeySer(t, "foo\0bar");
  checkKeySer(t, "foo\u1000bar");
  checkKeySer(t, "foo\u2000bar");
  checkKeySer(t, "foo\u5000bar");
  checkKeySer(t, "foo\uffffbar");
  checkKeySer(t, 42);
  checkKeySer(t, 255);
  checkKeySer(t, 254);
  checkKeySer(t, [1, 2, 3, 4]);
  checkKeySer(t, [[[1], 3], [4]]);
});

View File

@ -0,0 +1,363 @@
/*
This file is part of GNU Taler
(C) 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
/*
Encoding layout (inspired by Firefox, but slightly simplified):

Numbers:  tagNum  n n n n n n n n
Dates:    tagDate n n n n n n n n
Strings:  tagString s s s s ... 0
Binaries: tagBinary s s s s ... 0
Arrays:   tagArray i i i ... 0

The concrete tag byte values are the tag* constants defined below.

Numbers/dates are encoded as 64-bit IEEE 754 floats with the sign bit
flipped, in order to make them sortable.
*/
/**
* Imports.
*/
import { IDBValidKey } from "../idbtypes.js";
// Type tag bytes for serialized keys. The tag is the first byte of an
// encoded key; tag order (num < date < string < binary < array) matches
// IndexedDB key ordering.
const tagNum = 0xa0;
const tagDate = 0xb0;
const tagString = 0xc0;
// Fix: was 0xc0, colliding with tagString — binary keys would decode as
// strings and `case tagBinary` in internalDeserializeKey was
// unreachable. 0xd0 keeps binary sorted between string and array.
const tagBinary = 0xd0;
const tagArray = 0xe0;
// Variable-length encoding of 16-bit units (string code units / bytes):
// one byte for values <= oneByteMax, two bytes up to twoByteMax,
// otherwise three. Zero is reserved as a terminator.
const oneByteOffset = 0x01;
const twoByteOffset = 0x7f;
const oneByteMax = 0x7e;
const twoByteMax = 0x3fff + twoByteOffset;
const twoByteMask = 0b1000_0000;
const threeByteMask = 0b1100_0000;
/**
 * Number of bytes (1-3) that writeEnc will emit for the value `c`.
 */
export function countEncSize(c: number): number {
  if (c <= oneByteMax) {
    return 1;
  }
  return c <= twoByteMax ? 2 : 3;
}
/**
 * Write the variable-length encoding of a 16-bit value `c` at `offset`
 * and return the number of bytes written (1-3).
 *
 * - 1 byte:  c + oneByteOffset (keeps 0 free as a terminator)
 * - 2 bytes: twoByteMask-prefixed, after subtracting twoByteOffset
 * - 3 bytes: threeByteMask-prefixed; the last byte carries the low
 *   2 bits of c in its top bits
 */
export function writeEnc(dv: DataView, offset: number, c: number): number {
  if (c > twoByteMax) {
    // setUint8 truncates the shifted value to 8 bits, so only the low
    // 2 bits of c end up in the top of the third byte.
    dv.setUint8(offset + 2, (c & 0xff) << 6);
    dv.setUint8(offset + 1, (c >>> 2) & 0xff);
    dv.setUint8(offset, threeByteMask | (c >>> 10));
    return 3;
  } else if (c > oneByteMax) {
    c -= twoByteOffset;
    dv.setUint8(offset + 1, c & 0xff);
    dv.setUint8(offset, (c >>> 8) | twoByteMask);
    return 2;
  } else {
    // Offset by one so that 0 stays reserved as a terminator byte.
    c += oneByteOffset;
    dv.setUint8(offset, c);
    return 1;
  }
}
export function internalSerializeString(
dv: DataView,
offset: number,
key: string,
): number {
dv.setUint8(offset, tagString);
let n = 1;
for (let i = 0; i < key.length; i++) {
let c = key.charCodeAt(i);
n += writeEnc(dv, offset + n, c);
}
// Null terminator
dv.setUint8(offset + n, 0);
n++;
return n;
}
/**
 * Compute the exact number of bytes internalSerializeKey will write
 * for `key`, including tag and terminator bytes.
 */
export function countSerializeKey(key: IDBValidKey): number {
  // Numbers and dates: tag byte plus a 64-bit float.
  if (typeof key === "number") {
    return 9;
  }
  if (key instanceof Date) {
    return 9;
  }
  // Binary keys: tag + var-length encoding per byte + terminator.
  if (key instanceof ArrayBuffer) {
    let len = 2;
    for (const byte of new Uint8Array(key)) {
      len += countEncSize(byte);
    }
    return len;
  }
  if (ArrayBuffer.isView(key)) {
    const view = new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
    let len = 2;
    for (const byte of view) {
      len += countEncSize(byte);
    }
    return len;
  }
  // Strings: tag + var-length encoding per UTF-16 unit + terminator.
  if (typeof key === "string") {
    let len = 2;
    for (let i = 0; i < key.length; i++) {
      len += countEncSize(key.charCodeAt(i));
    }
    return len;
  }
  // Arrays: tag + nested encodings + terminator.
  if (Array.isArray(key)) {
    let len = 2;
    for (const element of key) {
      len += countSerializeKey(element);
    }
    return len;
  }
  throw Error("unsupported type for key");
}
/**
 * Serialize a number/date payload: the given tag byte followed by a
 * 64-bit IEEE 754 float with the sign bit flipped (so the big-endian
 * byte sequence of non-negative values sorts numerically).
 * Returns the number of bytes written (always 9).
 */
function internalSerializeNumeric(
  dv: DataView,
  offset: number,
  tag: number,
  val: number,
): number {
  // Fix: write the caller-supplied tag (tagNum or tagDate). Previously
  // tagNum was hard-coded, so dates deserialized as plain numbers.
  dv.setUint8(offset, tag);
  dv.setFloat64(offset + 1, val);
  // Flip sign bit
  let b = dv.getUint8(offset + 1);
  b ^= 0x80;
  dv.setUint8(offset + 1, b);
  return 9;
}
/**
 * Serialize an array at `offset`: tag byte, each element's full
 * encoding back-to-back, then a 0 terminator. Returns bytes written.
 */
function internalSerializeArray(
  dv: DataView,
  offset: number,
  key: any[],
): number {
  dv.setUint8(offset, tagArray);
  let written = 1;
  for (const element of key) {
    written += internalSerializeKey(element, dv, offset + written);
  }
  dv.setUint8(offset + written, 0);
  return written + 1;
}
function internalSerializeBinary(
dv: DataView,
offset: number,
key: Uint8Array,
): number {
dv.setUint8(offset, tagBinary);
let n = 1;
for (let i = 0; i < key.length; i++) {
n += internalSerializeKey(key[i], dv, offset + n);
}
dv.setUint8(offset + n, 0);
n++;
return n;
}
/**
 * Serialize `key` into `dv` at `offset`, dispatching on the runtime
 * type of the key. Returns the number of bytes written.
 */
function internalSerializeKey(
  key: IDBValidKey,
  dv: DataView,
  offset: number,
): number {
  if (typeof key === "number") {
    return internalSerializeNumeric(dv, offset, tagNum, key);
  }
  if (key instanceof Date) {
    // Fix: store the full epoch timestamp. getDate() only returns the
    // day-of-month, so dates did not round-trip (deserializeKey
    // reconstructs the value via `new Date(num)`).
    return internalSerializeNumeric(dv, offset, tagDate, key.getTime());
  }
  if (typeof key === "string") {
    return internalSerializeString(dv, offset, key);
  }
  if (Array.isArray(key)) {
    return internalSerializeArray(dv, offset, key);
  }
  if (key instanceof ArrayBuffer) {
    return internalSerializeBinary(dv, offset, new Uint8Array(key));
  }
  if (ArrayBuffer.isView(key)) {
    const uv = new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
    return internalSerializeBinary(dv, offset, uv);
  }
  throw Error("unsupported type for key");
}
/**
 * Serialize an IndexedDB key into a sortable byte string.
 *
 * Trailing zero bytes are stripped from the result; deserialization
 * logically pads them back (see getFloat64Trunc and the readers).
 */
export function serializeKey(key: IDBValidKey): Uint8Array {
  const expectedLen = countSerializeKey(key);
  let out = new Uint8Array(expectedLen);
  const written = internalSerializeKey(key, new DataView(out.buffer), 0);
  if (expectedLen != written) {
    throw Error("internal invariant failed");
  }
  let end = out.length;
  while (end > 0 && out[end - 1] == 0) {
    end--;
  }
  if (end < out.length) {
    out = out.slice(0, end);
  }
  return out;
}
/**
 * Decode a string payload starting at `offset` (the tag byte has
 * already been consumed). Returns the new offset and the string.
 *
 * Inverse of internalSerializeString/writeEnc. Decoding also stops at
 * the end of the view, since trailing zero bytes may have been
 * truncated by serializeKey.
 */
function internalReadString(dv: DataView, offset: number): [number, string] {
  const chars: string[] = [];
  while (offset < dv.byteLength) {
    const v = dv.getUint8(offset);
    if (v == 0) {
      // Got end-of-string.
      offset += 1;
      break;
    }
    let c: number;
    if ((v & threeByteMask) === threeByteMask) {
      // NOTE(review): the tag bits of b1 are not masked off, so c
      // carries a surplus 0x30000; String.fromCharCode coerces its
      // argument to uint16, which discards it. Works, but subtle.
      const b1 = v;
      const b2 = dv.getUint8(offset + 1);
      const b3 = dv.getUint8(offset + 2);
      c = (b1 << 10) | (b2 << 2) | (b3 >> 6);
      offset += 3;
    } else if ((v & twoByteMask) === twoByteMask) {
      const b1 = v & ~twoByteMask;
      const b2 = dv.getUint8(offset + 1);
      c = ((b1 << 8) | b2) + twoByteOffset;
      offset += 2;
    } else {
      c = v - oneByteOffset;
      offset += 1;
    }
    chars.push(String.fromCharCode(c));
  }
  return [offset, chars.join("")];
}
/**
 * Decode a binary payload starting at `offset` (tag byte already
 * consumed). Returns the new offset and the decoded bytes.
 * Mirrors internalReadString, but collects bytes instead of chars.
 */
function internalReadBytes(dv: DataView, offset: number): [number, Uint8Array] {
  // First pass: count *decoded* bytes up to the 0 terminator (or the
  // truncated end of the buffer), stepping over multi-byte encodings.
  // Fix: previously raw encoded bytes were counted, so any byte value
  // encoded in two bytes over-allocated the result and left spurious
  // trailing zeroes.
  let count = 0;
  let probe = offset;
  while (probe < dv.byteLength) {
    const v = dv.getUint8(probe);
    if (v === 0) {
      break;
    }
    if ((v & threeByteMask) === threeByteMask) {
      probe += 3;
    } else if ((v & twoByteMask) === twoByteMask) {
      probe += 2;
    } else {
      probe += 1;
    }
    count++;
  }
  let writePos = 0;
  const bytes = new Uint8Array(count);
  while (offset < dv.byteLength) {
    const v = dv.getUint8(offset);
    if (v == 0) {
      offset += 1;
      break;
    }
    let c: number;
    if ((v & threeByteMask) === threeByteMask) {
      // NOTE(review): byte values are <= 0xff and writeEnc only emits
      // the 3-byte form for values > twoByteMax, so this branch should
      // be unreachable for well-formed binary keys.
      const b1 = v;
      const b2 = dv.getUint8(offset + 1);
      const b3 = dv.getUint8(offset + 2);
      c = (b1 << 10) | (b2 << 2) | (b3 >> 6);
      offset += 3;
    } else if ((v & twoByteMask) === twoByteMask) {
      const b1 = v & ~twoByteMask;
      const b2 = dv.getUint8(offset + 1);
      c = ((b1 << 8) | b2) + twoByteOffset;
      offset += 2;
    } else {
      c = v - oneByteOffset;
      offset += 1;
    }
    bytes[writePos] = c;
    writePos++;
  }
  return [offset, bytes];
}
/**
 * Same as DataView.getFloat64, but logically pad the input with zeroes
 * on the right if the read would run past the end of the view.
 *
 * This allows reading from buffers whose trailing zeroes have been
 * truncated (see serializeKey).
 */
function getFloat64Trunc(dv: DataView, offset: number): number {
  // Fast path: all 8 bytes are within bounds.
  if (offset + 7 < dv.byteLength) {
    return dv.getFloat64(offset);
  }
  // Slow path: copy the available bytes into a zero-filled buffer.
  const padded = new Uint8Array(8);
  for (let i = offset; i < dv.byteLength; i++) {
    padded[i - offset] = dv.getUint8(i);
  }
  return new DataView(padded.buffer).getFloat64(0);
}
/**
 * Deserialize one key starting at `offset` in `dv`.
 *
 * Returns the offset just past the consumed bytes together with the
 * decoded key. Tolerates views whose trailing zero bytes were
 * truncated by serializeKey (hence getFloat64Trunc and the Math.min
 * clamping of the float payload offsets).
 */
function internalDeserializeKey(
  dv: DataView,
  offset: number,
): [number, IDBValidKey] {
  let tag = dv.getUint8(offset);
  switch (tag) {
    case tagNum: {
      // Negate to undo the sign-bit flip applied at serialization time.
      const num = -getFloat64Trunc(dv, offset + 1);
      const newOffset = Math.min(offset + 9, dv.byteLength);
      return [newOffset, num];
    }
    case tagDate: {
      const num = -getFloat64Trunc(dv, offset + 1);
      const newOffset = Math.min(offset + 9, dv.byteLength);
      return [newOffset, new Date(num)];
    }
    case tagString: {
      return internalReadString(dv, offset + 1);
    }
    case tagBinary: {
      return internalReadBytes(dv, offset + 1);
    }
    case tagArray: {
      const arr: any[] = [];
      offset += 1;
      // Elements follow back-to-back until a 0 terminator or the
      // (possibly truncated) end of the buffer.
      while (offset < dv.byteLength) {
        const innerTag = dv.getUint8(offset);
        if (innerTag === 0) {
          offset++;
          break;
        }
        const [innerOff, innerVal] = internalDeserializeKey(dv, offset);
        arr.push(innerVal);
        offset = innerOff;
      }
      return [offset, arr];
    }
    default:
      throw Error("invalid key (unrecognized tag)");
  }
}
/**
 * Deserialize a byte string produced by serializeKey back into an
 * IndexedDB key. Throws if the buffer is not fully consumed.
 */
export function deserializeKey(encodedKey: Uint8Array): IDBValidKey {
  const view = new DataView(
    encodedKey.buffer,
    encodedKey.byteOffset,
    encodedKey.byteLength,
  );
  const [endOffset, key] = internalDeserializeKey(view, 0);
  if (endOffset != encodedKey.byteLength) {
    throw Error("internal invariant failed");
  }
  return key;
}

View File

@ -20,55 +20,73 @@ import { makeStoreKeyValue } from "./makeStoreKeyValue.js";
test("basics", (t) => {
let result;
result = makeStoreKeyValue({ name: "Florian" }, undefined, 42, true, "id");
result = makeStoreKeyValue({
value: { name: "Florian" },
key: undefined,
currentKeyGenerator: 42,
autoIncrement: true,
keyPath: "id",
});
t.is(result.updatedKeyGenerator, 43);
t.is(result.key, 42);
t.is(result.value.name, "Florian");
t.is(result.value.id, 42);
result = makeStoreKeyValue(
{ name: "Florian", id: 10 },
undefined,
5,
true,
"id",
);
result = makeStoreKeyValue({
value: { name: "Florian", id: 10 },
key: undefined,
currentKeyGenerator: 5,
autoIncrement: true,
keyPath: "id",
});
t.is(result.updatedKeyGenerator, 11);
t.is(result.key, 10);
t.is(result.value.name, "Florian");
t.is(result.value.id, 10);
result = makeStoreKeyValue(
{ name: "Florian", id: 5 },
undefined,
10,
true,
"id",
);
result = makeStoreKeyValue({
value: { name: "Florian", id: 5 },
key: undefined,
currentKeyGenerator: 10,
autoIncrement: true,
keyPath: "id",
});
t.is(result.updatedKeyGenerator, 10);
t.is(result.key, 5);
t.is(result.value.name, "Florian");
t.is(result.value.id, 5);
result = makeStoreKeyValue(
{ name: "Florian", id: "foo" },
undefined,
10,
true,
"id",
);
result = makeStoreKeyValue({
value: { name: "Florian", id: "foo" },
key: undefined,
currentKeyGenerator: 10,
autoIncrement: true,
keyPath: "id",
});
t.is(result.updatedKeyGenerator, 10);
t.is(result.key, "foo");
t.is(result.value.name, "Florian");
t.is(result.value.id, "foo");
result = makeStoreKeyValue({ name: "Florian" }, "foo", 10, true, null);
result = makeStoreKeyValue({
value: { name: "Florian" },
key: "foo",
currentKeyGenerator: 10,
autoIncrement: true,
keyPath: null,
});
t.is(result.updatedKeyGenerator, 10);
t.is(result.key, "foo");
t.is(result.value.name, "Florian");
t.is(result.value.id, undefined);
result = makeStoreKeyValue({ name: "Florian" }, undefined, 10, true, null);
result = makeStoreKeyValue({
value: { name: "Florian" },
key: undefined,
currentKeyGenerator: 10,
autoIncrement: true,
keyPath: null,
});
t.is(result.updatedKeyGenerator, 11);
t.is(result.key, 10);
t.is(result.value.name, "Florian");

View File

@ -75,19 +75,25 @@ function injectKey(
return newValue;
}
export function makeStoreKeyValue(
value: any,
key: IDBValidKey | undefined,
currentKeyGenerator: number,
autoIncrement: boolean,
keyPath: IDBKeyPath | IDBKeyPath[] | null,
): StoreKeyResult {
export interface MakeStoreKvRequest {
value: any;
key: IDBValidKey | undefined;
currentKeyGenerator: number;
autoIncrement: boolean;
keyPath: IDBKeyPath | IDBKeyPath[] | null;
}
export function makeStoreKeyValue(req: MakeStoreKvRequest): StoreKeyResult {
const { keyPath, currentKeyGenerator, autoIncrement } = req;
let { key, value } = req;
const haveKey = key !== null && key !== undefined;
const haveKeyPath = keyPath !== null && keyPath !== undefined;
// This models a decision table on (haveKey, haveKeyPath, autoIncrement)
try {
// FIXME: Perf: only do this if we need to inject something.
value = structuredClone(value);
} catch (e) {
throw new DataCloneError();

View File

@ -14,6 +14,11 @@
permissions and limitations under the License.
*/
/**
* Queue a task to be executed *after* the microtask
* queue has been processed, but *before* subsequent setTimeout / setImmediate
* tasks.
*/
export function queueTask(fn: () => void) {
let called = false;
const callFirst = () => {

View File

@ -15,7 +15,11 @@
*/
import test, { ExecutionContext } from "ava";
import { structuredClone } from "./structuredClone.js";
import {
structuredClone,
structuredEncapsulate,
structuredRevive,
} from "./structuredClone.js";
function checkClone(t: ExecutionContext, x: any): void {
t.deepEqual(structuredClone(x), x);
@ -59,3 +63,58 @@ test("structured clone (object cycles)", (t) => {
const obj1Clone = structuredClone(obj1);
t.is(obj1Clone, obj1Clone.c);
});
test("encapsulate", (t) => {
t.deepEqual(structuredEncapsulate(42), 42);
t.deepEqual(structuredEncapsulate(true), true);
t.deepEqual(structuredEncapsulate(false), false);
t.deepEqual(structuredEncapsulate(null), null);
t.deepEqual(structuredEncapsulate(undefined), { $: "undef" });
t.deepEqual(structuredEncapsulate(42n), { $: "bigint", val: "42" });
t.deepEqual(structuredEncapsulate(new Date(42)), { $: "date", val: 42 });
t.deepEqual(structuredEncapsulate({ x: 42 }), { x: 42 });
t.deepEqual(structuredEncapsulate({ $: "bla", x: 42 }), {
$: "obj",
val: { $: "bla", x: 42 },
});
const x = { foo: 42, bar: {} } as any;
x.bar.baz = x;
t.deepEqual(structuredEncapsulate(x), {
foo: 42,
bar: {
baz: { $: "ref", d: 2, p: [] },
},
});
});
test("revive", (t) => {
t.deepEqual(structuredRevive(42), 42);
t.deepEqual(structuredRevive([1, 2, 3]), [1, 2, 3]);
t.deepEqual(structuredRevive(true), true);
t.deepEqual(structuredRevive(false), false);
t.deepEqual(structuredRevive(null), null);
t.deepEqual(structuredRevive({ $: "undef" }), undefined);
t.deepEqual(structuredRevive({ x: { $: "undef" } }), { x: undefined });
t.deepEqual(structuredRevive({ $: "date", val: 42}), new Date(42));
{
const x = { foo: 42, bar: {} } as any;
x.bar.baz = x;
const r = {
foo: 42,
bar: {
baz: { $: "ref", d: 2, p: [] },
},
};
t.deepEqual(structuredRevive(r), x);
}
});

View File

@ -18,20 +18,19 @@
* Encoding (new, compositional version):
*
* Encapsulate object that itself might contain a "$" field:
* { $: { E... } }
* { $: "obj", val: ... }
* (Outer level only:) Wrap other values into object
* { $: "lit", val: ... }
* Circular reference:
* { $: ["ref", uplevel, field...] }
 *   { $: "ref", d: depth, p: path }
* Date:
* { $: ["data"], val: datestr }
* { $: "date", val: datestr }
* Bigint:
* { $: ["bigint"], val: bigintstr }
* { $: "bigint", val: bigintstr }
* Array with special (non-number) attributes:
* { $: ["array"], val: arrayobj }
* { $: "array", val: arrayobj }
* Undefined field
* { $: "undef" }
*
* Legacy (top-level only), for backwards compatibility:
* { $types: [...] }
*/
/**
@ -261,22 +260,18 @@ export function mkDeepCloneCheckOnly() {
function internalEncapsulate(
val: any,
outRoot: any,
path: string[],
memo: Map<any, string[]>,
types: Array<[string[], string]>,
): any {
const memoPath = memo.get(val);
if (memoPath) {
types.push([path, "ref"]);
return memoPath;
return { $: "ref", d: path.length, p: memoPath };
}
if (val === null) {
return null;
}
if (val === undefined) {
types.push([path, "undef"]);
return 0;
return { $: "undef" };
}
if (Array.isArray(val)) {
memo.set(val, path);
@ -289,31 +284,33 @@ function internalEncapsulate(
break;
}
}
if (special) {
types.push([path, "array"]);
}
for (const x in val) {
const p = [...path, x];
outArr[x] = internalEncapsulate(val[x], outRoot, p, memo, types);
outArr[x] = internalEncapsulate(val[x], p, memo);
}
if (special) {
return { $: "array", val: outArr };
} else {
return outArr;
}
}
if (val instanceof Date) {
types.push([path, "date"]);
return val.getTime();
return { $: "date", val: val.getTime() };
}
if (isUserObject(val) || isPlainObject(val)) {
memo.set(val, path);
const outObj: any = {};
for (const x in val) {
const p = [...path, x];
outObj[x] = internalEncapsulate(val[x], outRoot, p, memo, types);
outObj[x] = internalEncapsulate(val[x], p, memo);
}
if ("$" in outObj) {
return { $: "obj", val: outObj };
}
return outObj;
}
if (typeof val === "bigint") {
types.push([path, "bigint"]);
return val.toString();
return { $: "bigint", val: val.toString() };
}
if (typeof val === "boolean") {
return val;
@ -327,123 +324,103 @@ function internalEncapsulate(
throw Error();
}
/**
 * Dereference a location inside the (partially revived) object graph.
 *
 * Starting at root, follows the first n components of p1, then all
 * components of p2, and returns the node reached.
 */
function derefPath(
  root: any,
  p1: Array<string | number>,
  n: number,
  p2: Array<string | number>,
): any {
  let node = root;
  let i = 0;
  while (i < n) {
    node = node[p1[i]];
    i++;
  }
  for (const seg of p2) {
    node = node[seg];
  }
  return node;
}
/**
 * Revive an encapsulated array element-by-element.
 *
 * When no root has been established yet (top-level call), the freshly
 * created array becomes the root used to resolve "$: ref" nodes.
 */
function internalReviveArray(sval: any, root: any, path: string[]): any {
  const out: any[] = [];
  const effectiveRoot = root === undefined ? out : root;
  for (let idx = 0; idx < sval.length; idx++) {
    out.push(internalStructuredRevive(sval[idx], effectiveRoot, [...path, String(idx)]));
  }
  return out;
}
/**
 * Revive an encapsulated plain object key-by-key.
 *
 * When no root has been established yet (top-level call), the freshly
 * created object becomes the root used to resolve "$: ref" nodes.
 */
function internalReviveObject(sval: any, root: any, path: string[]): any {
  const out = {} as any;
  const effectiveRoot = root === undefined ? out : root;
  for (const k of Object.keys(sval)) {
    out[k] = internalStructuredRevive(sval[k], effectiveRoot, [...path, k]);
  }
  return out;
}
/**
 * Core of structuredRevive: recursively convert a plain JSON value
 * produced by structuredEncapsulate back into its structured form
 * (undefined, bigint, Date, cyclic references, ...).
 *
 * @param sval encapsulated value
 * @param root root of the object graph revived so far
 *   (undefined at the top level); used to resolve "$: ref" nodes
 * @param path path from the root down to sval
 */
function internalStructuredRevive(sval: any, root: any, path: string[]): any {
  // Primitives (and null) encode as themselves.
  if (typeof sval === "string") {
    return sval;
  }
  if (typeof sval === "number") {
    return sval;
  }
  if (typeof sval === "boolean") {
    return sval;
  }
  if (sval === null) {
    return null;
  }
  if (Array.isArray(sval)) {
    return internalReviveArray(sval, root, path);
  }
  if (isUserObject(sval) || isPlainObject(sval)) {
    if ("$" in sval) {
      const dollar = sval.$;
      switch (dollar) {
        case "undef":
          return undefined;
        case "bigint":
          return BigInt((sval as any).val);
        case "date":
          return new Date((sval as any).val);
        case "obj": {
          // Object that itself contained a "$" key; payload is in "val".
          return internalReviveObject((sval as any).val, root, path);
        }
        case "array":
          return internalReviveArray((sval as any).val, root, path);
        case "ref": {
          // internalEncapsulate emits { $: "ref", d: <depth>, p: <path> }.
          // Read "d" (the field the encoder actually writes); fall back to
          // the legacy "l" field name for older encodings.
          const level = (sval as any).d ?? (sval as any).l;
          const p2 = (sval as any).p;
          return derefPath(root, path, path.length - level, p2);
        }
        default:
          throw Error();
      }
    } else {
      return internalReviveObject(sval, root, path);
    }
  }
  throw Error();
}
/**
* Encapsulate a cloneable value into a plain JSON object.
* Encapsulate a cloneable value into a plain JSON value.
*/
export function structuredEncapsulate(val: any): any {
const outRoot = {};
const types: Array<[string[], string]> = [];
let res;
res = internalEncapsulate(val, outRoot, [], new Map(), types);
if (res === null) {
return res;
}
// We need to further encapsulate the outer layer
if (
Array.isArray(res) ||
typeof res !== "object" ||
"$" in res ||
"$types" in res
) {
res = { $: res };
}
if (types.length > 0) {
res["$types"] = types;
}
return res;
return internalEncapsulate(val, [], new Map());
}
/**
 * Apply the legacy "$types" annotation format to a parsed value.
 *
 * The legacy encoding stores a top-level list of [path, type] pairs in
 * "$types" and wraps non-object top-level payloads under "$".  Each
 * annotation is applied by mutating the value at the given path
 * (bigint/date/undef conversion, array reconstruction, or reference
 * resolution).
 *
 * @param val parsed JSON value, possibly carrying "$types" / "$"
 * @returns the value with all legacy annotations applied
 * @throws Error on non-plain-object input or an unknown annotation type
 */
export function applyLegacyTypeAnnotations(val: any): any {
  if (val === null) {
    return null;
  }
  if (typeof val === "number") {
    return val;
  }
  if (typeof val === "string") {
    return val;
  }
  if (typeof val === "boolean") {
    return val;
  }
  if (!isPlainObject(val)) {
    throw Error();
  }
  let types = val.$types ?? [];
  // Remove the annotation list so it doesn't leak into the result.
  delete val.$types;
  let outRoot: any;
  if ("$" in val) {
    // Wrapped top-level payload (non-object or "$"-colliding value).
    outRoot = val.$;
  } else {
    outRoot = val;
  }
  // Replace the value at 'path' (relative to outRoot) by f(oldValue).
  // An empty path replaces the root itself.
  function mutatePath(path: string[], f: (x: any) => any): void {
    if (path.length == 0) {
      outRoot = f(outRoot);
      return;
    }
    let obj = outRoot;
    for (let i = 0; i < path.length - 1; i++) {
      const n = path[i];
      if (!(n in obj)) {
        obj[n] = {};
      }
      obj = obj[n];
    }
    const last = path[path.length - 1];
    obj[last] = f(obj[last]);
  }
  // Resolve a path (relative to outRoot) to the value it points at.
  function lookupPath(path: string[]): any {
    let obj = outRoot;
    for (const n of path) {
      obj = obj[n];
    }
    return obj;
  }
  for (const [path, type] of types) {
    switch (type) {
      case "bigint": {
        mutatePath(path, (x) => BigInt(x));
        break;
      }
      case "array": {
        // Legacy encoding stores special arrays as plain objects with
        // index keys; rebuild a real Array from them.
        mutatePath(path, (x) => {
          const newArr: any = [];
          for (const k in x) {
            newArr[k] = x[k];
          }
          return newArr;
        });
        break;
      }
      case "date": {
        mutatePath(path, (x) => new Date(x));
        break;
      }
      case "undef": {
        mutatePath(path, (x) => undefined);
        break;
      }
      case "ref": {
        // The stored value is itself a path; replace it by the
        // referenced object to restore the cycle.
        mutatePath(path, (x) => lookupPath(x));
        break;
      }
      default:
        throw Error(`type '${type}' not implemented`);
    }
  }
  return outRoot;
}
export function internalStructuredRevive(val: any): any {
// FIXME: Do the newly specified, compositional encoding here.
val = JSON.parse(JSON.stringify(val));
return val;
}
export function structuredRevive(val: any): any {
const r = internalStructuredRevive(val);
return applyLegacyTypeAnnotations(r);
/**
 * Revive a plain JSON value produced by structuredEncapsulate back
 * into its original structured-cloneable form.
 */
export function structuredRevive(sval: any): any {
  return internalStructuredRevive(sval, undefined, []);
}
/**

View File

@ -17,7 +17,11 @@
import { IDBValidKey } from "../idbtypes.js";
import { DataError } from "./errors.js";
// https://www.w3.org/TR/IndexedDB-2/#convert-a-value-to-a-key
/**
* Algorithm to "convert a value to a key".
*
* https://www.w3.org/TR/IndexedDB/#convert-value-to-key
*/
export function valueToKey(
input: any,
seen?: Set<object>,

View File

@ -4,7 +4,7 @@
"lib": ["es6"],
"module": "ES2020",
"moduleResolution": "Node16",
"target": "ES6",
"target": "ES2020",
"allowJs": true,
"noImplicitAny": true,
"outDir": "lib",

View File

@ -11,6 +11,7 @@
".": {
"node": "./lib/index.node.js",
"browser": "./lib/index.browser.js",
"qtart": "./lib/index.qtart.js",
"default": "./lib/index.js"
},
"./twrpc": {

View File

@ -0,0 +1,27 @@
/*
This file is part of GNU Taler
(C) 2021 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
import { setPRNG } from "./nacl-fast.js";
setPRNG(function (x: Uint8Array, n: number) {
// @ts-ignore
const va = globalThis._tart.randomBytes(n);
const v = new Uint8Array(va);
for (let i = 0; i < n; i++) x[i] = v[i];
for (let i = 0; i < v.length; i++) v[i] = 0;
});
export * from "./index.js";

View File

@ -48,6 +48,7 @@ import {
RefreshReason,
TalerErrorDetail,
TransactionIdStr,
TransactionStateFilter,
} from "./wallet-types.js";
export interface TransactionsRequest {
@ -65,6 +66,8 @@ export interface TransactionsRequest {
* If true, include all refreshes in the transactions list.
*/
includeRefreshes?: boolean;
filterByState?: TransactionStateFilter
}
export interface TransactionState {

View File

@ -2644,3 +2644,10 @@ export const codecForValidateIbanResponse = (): Codec<ValidateIbanResponse> =>
buildCodecForObject<ValidateIbanResponse>()
.property("valid", codecForBoolean())
.build("ValidateIbanResponse");
export type TransactionStateFilter = "nonfinal";
export interface TransactionRecordFilter {
onlyState?: TransactionStateFilter;
onlyCurrency?: string;
}

View File

@ -24,7 +24,9 @@ install_target = $(prefix)/lib/taler-wallet-cli
.PHONY: install install-nodeps deps
install-nodeps:
./build-node.mjs
@echo installing wallet CLI to $(install_target)
install -d $(prefix)/bin
install -d $(install_target)/build
install -d $(install_target)/bin
install -d $(install_target)/node_modules/taler-wallet-cli
install -d $(install_target)/node_modules/taler-wallet-cli/bin
@ -32,6 +34,8 @@ install-nodeps:
install ./dist/taler-wallet-cli-bundled.cjs $(install_target)/node_modules/taler-wallet-cli/dist/
install ./dist/taler-wallet-cli-bundled.cjs.map $(install_target)/node_modules/taler-wallet-cli/dist/
install ./bin/taler-wallet-cli.mjs $(install_target)/node_modules/taler-wallet-cli/bin/
install ../idb-bridge/node_modules/better-sqlite3/build/Release/better_sqlite3.node $(install_target)/build/ \
|| echo "sqlite3 unavailable, better-sqlite3 native module not found"
ln -sf $(install_target)/node_modules/taler-wallet-cli/bin/taler-wallet-cli.mjs $(prefix)/bin/taler-wallet-cli
deps:
pnpm install --frozen-lockfile --filter @gnu-taler/taler-wallet-cli...

View File

@ -2,3 +2,8 @@
This package provides `taler-wallet-cli`, the command-line interface for the
GNU Taler wallet.
## sqlite3 backend
To be able to use the sqlite3 backend, make sure that better-sqlite3
is installed as an optional dependency in the ../idb-bridge package.

View File

@ -0,0 +1,8 @@
#!/usr/bin/env node
// Execute the wallet CLI from the source directory.
// This script is meant for testing and must not
// be installed.
import { main } from '../lib/index.js';
main();

View File

@ -59,7 +59,7 @@ export const buildConfig = {
conditions: ["qtart"],
sourcemap: true,
// quickjs standard library
external: ["std", "os"],
external: ["std", "os", "better-sqlite3"],
define: {
__VERSION__: `"${_package.version}"`,
__GIT_HASH__: `"${GIT_HASH}"`,

View File

@ -119,7 +119,7 @@ export const CURRENT_DB_CONFIG_KEY = "currentMainDbName";
* backwards-compatible way or object stores and indices
* are added.
*/
export const WALLET_DB_MINOR_VERSION = 9;
export const WALLET_DB_MINOR_VERSION = 10;
/**
* Ranges for operation status fields.
@ -2675,6 +2675,9 @@ export const WalletStoresV1 = {
}),
{
byProposalId: describeIndex("byProposalId", "proposalId"),
byStatus: describeIndex("byStatus", "status", {
versionAdded: 10,
}),
},
),
refundItems: describeStore(

View File

@ -16,7 +16,7 @@
import { WalletNotification } from "@gnu-taler/taler-util";
import { HttpRequestLibrary } from "@gnu-taler/taler-util/http";
import { WalletConfig, WalletConfigParameter } from "./index.js";
import { WalletConfigParameter } from "./index.js";
/**
* Helpers to initiate a wallet in a host environment.

View File

@ -27,6 +27,7 @@ import type { IDBFactory } from "@gnu-taler/idb-bridge";
import {
BridgeIDBFactory,
MemoryBackend,
createSqliteBackend,
shimIndexedDB,
} from "@gnu-taler/idb-bridge";
import { AccessStats } from "@gnu-taler/idb-bridge";
@ -39,24 +40,21 @@ import { createPlatformHttpLib } from "@gnu-taler/taler-util/http";
import { SetTimeoutTimerAPI } from "./util/timer.js";
import { Wallet } from "./wallet.js";
import { DefaultNodeWalletArgs, makeTempfileId } from "./host-common.js";
import { createNodeSqlite3Impl } from "@gnu-taler/idb-bridge/node-sqlite3-bindings";
const logger = new Logger("host-impl.node.ts");
/**
* Get a wallet instance with default settings for node.
*
* Extended version that allows getting DB stats.
*/
export async function createNativeWalletHost2(
interface MakeDbResult {
idbFactory: BridgeIDBFactory;
getStats: () => AccessStats;
}
async function makeFileDb(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
): Promise<MakeDbResult> {
BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend();
myBackend.enableTracing = false;
const storagePath = args.persistentStoragePath;
if (storagePath) {
try {
@ -96,8 +94,41 @@ export async function createNativeWalletHost2(
BridgeIDBFactory.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory;
return {
idbFactory: myBridgeIdbFactory,
getStats: () => myBackend.accessStats,
};
}
/**
 * Create an sqlite3-backed IndexedDB bridge factory for node.
 *
 * The database lives at args.persistentStoragePath, or in memory
 * (":memory:") when no path is given.  The bindings come from
 * createNodeSqlite3Impl (presumably better-sqlite3 — see the
 * idb-bridge package's optionalDependencies).
 */
async function makeSqliteDb(
  args: DefaultNodeWalletArgs,
): Promise<MakeDbResult> {
  BridgeIDBFactory.enableTracing = false;
  const imp = await createNodeSqlite3Impl();
  const myBackend = await createSqliteBackend(imp, {
    // ":memory:" is sqlite's name for a transient in-memory database.
    filename: args.persistentStoragePath ?? ":memory:",
  });
  myBackend.enableTracing = false;
  const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
  return {
    getStats() {
      // Access statistics are not tracked for the sqlite3 backend (yet).
      throw Error("not implemented");
    },
    idbFactory: myBridgeIdbFactory,
  };
}
/**
* Get a wallet instance with default settings for node.
*
* Extended version that allows getting DB stats.
*/
export async function createNativeWalletHost2(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
let myHttpLib;
if (args.httpLib) {
myHttpLib = args.httpLib;
@ -115,7 +146,17 @@ export async function createNativeWalletHost2(
);
};
shimIndexedDB(myBridgeIdbFactory);
let dbResp: MakeDbResult;
if (!args.persistentStoragePath || args.persistentStoragePath.endsWith(".json")) {
dbResp = await makeFileDb(args);
} else {
dbResp = await makeSqliteDb(args);
}
const myIdbFactory: IDBFactory = dbResp.idbFactory as any as IDBFactory;
shimIndexedDB(dbResp.idbFactory);
const myDb = await openTalerDatabase(myIdbFactory, myVersionChange);
@ -158,6 +199,6 @@ export async function createNativeWalletHost2(
}
return {
wallet: w,
getDbStats: () => myBackend.accessStats,
getDbStats: dbResp.getStats,
};
}

View File

@ -22,11 +22,17 @@
/**
* Imports.
*/
import type { IDBFactory } from "@gnu-taler/idb-bridge";
import type {
IDBFactory,
ResultRow,
Sqlite3Interface,
Sqlite3Statement,
} from "@gnu-taler/idb-bridge";
// eslint-disable-next-line no-duplicate-imports
import {
BridgeIDBFactory,
MemoryBackend,
createSqliteBackend,
shimIndexedDB,
} from "@gnu-taler/idb-bridge";
import { AccessStats } from "@gnu-taler/idb-bridge";
@ -41,12 +47,78 @@ import { DefaultNodeWalletArgs, makeTempfileId } from "./host-common.js";
const logger = new Logger("host-impl.qtart.ts");
export async function createNativeWalletHost2(
interface MakeDbResult {
idbFactory: BridgeIDBFactory;
getStats: () => AccessStats;
}
// Counts primitive sqlite3 statement executions across all connections;
// reported via the backend's access stats.
let numStmt = 0;

/**
 * Create a Sqlite3Interface implementation on top of the sqlite3
 * bindings that the qtart (QuickJS) host exposes on globalThis._tart.
 *
 * Every getAll/getFirst/run/exec call increments the module-level
 * numStmt counter.
 *
 * @throws Error if the host bindings are not available
 */
export async function createQtartSqlite3Impl(): Promise<Sqlite3Interface> {
  const tart: any = (globalThis as any)._tart;
  if (!tart) {
    // Message must name the property we actually probe (_tart, not _qtart).
    throw Error("globalThis._tart not defined");
  }
  return {
    open(filename: string) {
      const internalDbHandle = tart.sqlite3Open(filename);
      return {
        internalDbHandle,
        close() {
          tart.sqlite3Close(internalDbHandle);
        },
        prepare(stmtStr): Sqlite3Statement {
          const stmtHandle = tart.sqlite3Prepare(internalDbHandle, stmtStr);
          return {
            internalStatement: stmtHandle,
            getAll(params): ResultRow[] {
              numStmt++;
              return tart.sqlite3StmtGetAll(stmtHandle, params);
            },
            getFirst(params): ResultRow | undefined {
              numStmt++;
              return tart.sqlite3StmtGetFirst(stmtHandle, params);
            },
            run(params) {
              numStmt++;
              return tart.sqlite3StmtRun(stmtHandle, params);
            },
          };
        },
        exec(sqlStr): void {
          numStmt++;
          tart.sqlite3Exec(internalDbHandle, sqlStr);
        },
      };
    },
  };
}
/**
 * Create an sqlite3-backed IndexedDB bridge factory using the qtart
 * host's sqlite3 bindings.
 *
 * The database lives at args.persistentStoragePath, or in memory
 * (":memory:") when no path is given.
 */
async function makeSqliteDb(
  args: DefaultNodeWalletArgs,
): Promise<MakeDbResult> {
  BridgeIDBFactory.enableTracing = false;
  const imp = await createQtartSqlite3Impl();
  const myBackend = await createSqliteBackend(imp, {
    // ":memory:" is sqlite's name for a transient in-memory database.
    filename: args.persistentStoragePath ?? ":memory:",
  });
  myBackend.trackStats = true;
  myBackend.enableTracing = false;
  const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
  return {
    getStats() {
      return {
        ...myBackend.accessStats,
        // Total primitive sqlite3 statements executed so far (module counter).
        primitiveStatements: numStmt,
      }
    },
    idbFactory: myBridgeIdbFactory,
  };
}
async function makeFileDb(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
): Promise<MakeDbResult> {
BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend();
myBackend.enableTracing = false;
@ -78,12 +150,34 @@ export async function createNativeWalletHost2(
};
}
logger.info("done processing storage path");
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
return {
idbFactory: myBridgeIdbFactory,
getStats: () => myBackend.accessStats,
};
}
export async function createNativeWalletHost2(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
BridgeIDBFactory.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory;
let dbResp: MakeDbResult;
if (args.persistentStoragePath && args.persistentStoragePath.endsWith(".json")) {
logger.info("using JSON file backend (slow!)");
dbResp = await makeFileDb(args);
} else {
logger.info("using JSON file backend (experimental!)");
dbResp = await makeSqliteDb(args)
}
const myIdbFactory: IDBFactory = dbResp.idbFactory as any as IDBFactory;
shimIndexedDB(dbResp.idbFactory);
let myHttpLib;
if (args.httpLib) {
@ -102,8 +196,6 @@ export async function createNativeWalletHost2(
);
};
shimIndexedDB(myBridgeIdbFactory);
const myDb = await openTalerDatabase(myIdbFactory, myVersionChange);
let workerFactory;
@ -124,6 +216,6 @@ export async function createNativeWalletHost2(
}
return {
wallet: w,
getDbStats: () => myBackend.accessStats,
getDbStats: dbResp.getStats,
};
}

View File

@ -16,7 +16,6 @@
import { DefaultNodeWalletArgs } from "./host-common.js";
import { Wallet } from "./index.js";
import * as hostImpl from "#host-impl";
import { AccessStats } from "@gnu-taler/idb-bridge";

View File

@ -34,13 +34,24 @@ import {
WithdrawalGroupStatus,
RewardRecordStatus,
DepositOperationStatus,
RefreshGroupRecord,
WithdrawalGroupRecord,
DepositGroupRecord,
RewardRecord,
PurchaseRecord,
PeerPullPaymentInitiationRecord,
PeerPullPaymentIncomingRecord,
PeerPushPaymentInitiationRecord,
PeerPushPaymentIncomingRecord,
RefundGroupRecord,
RefundGroupStatus,
} from "../db.js";
import {
PendingOperationsResponse,
PendingTaskType,
TaskId,
} from "../pending-types.js";
import { AbsoluteTime } from "@gnu-taler/taler-util";
import { AbsoluteTime, TransactionRecordFilter } from "@gnu-taler/taler-util";
import { InternalWalletState } from "../internal-wallet-state.js";
import { GetReadOnlyAccess } from "../util/query.js";
import { GlobalIDB } from "@gnu-taler/idb-bridge";
@ -105,6 +116,32 @@ async function gatherExchangePending(
});
}
/**
 * Iterate refresh group records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only records whose status lies in the
 * active OperationStatusRange are visited (via the byStatus index).
 */
export async function iterRecordsForRefresh(
  tx: GetReadOnlyAccess<{
    refreshGroups: typeof WalletStoresV1.refreshGroups;
  }>,
  filter: TransactionRecordFilter,
  f: (r: RefreshGroupRecord) => Promise<void>,
): Promise<void> {
  const range =
    filter.onlyState === "nonfinal"
      ? GlobalIDB.KeyRange.bound(
          OperationStatusRange.ACTIVE_START,
          OperationStatusRange.ACTIVE_END,
        )
      : undefined;
  const records: RefreshGroupRecord[] =
    await tx.refreshGroups.indexes.byStatus.getAll(range);
  for (const rec of records) {
    await f(rec);
  }
}
async function gatherRefreshPending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
@ -114,22 +151,13 @@ async function gatherRefreshPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound(
OperationStatusRange.ACTIVE_START,
OperationStatusRange.ACTIVE_END,
);
const refreshGroups = await tx.refreshGroups.indexes.byStatus.getAll(
keyRange,
);
for (const r of refreshGroups) {
await iterRecordsForRefresh(tx, { onlyState: "nonfinal" }, async (r) => {
if (r.timestampFinished) {
return;
}
const opId = TaskIdentifiers.forRefresh(r);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
resp.pendingOperations.push({
type: PendingTaskType.Refresh,
...getPendingCommon(ws, opId, timestampDue),
@ -140,6 +168,30 @@ async function gatherRefreshPending(
),
retryInfo: retryRecord?.retryInfo,
});
});
}
/**
 * Iterate withdrawal group records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only records whose status lies between
 * PendingRegisteringBank and PendingAml are visited (byStatus index).
 */
export async function iterRecordsForWithdrawal(
  tx: GetReadOnlyAccess<{
    withdrawalGroups: typeof WalletStoresV1.withdrawalGroups;
  }>,
  filter: TransactionRecordFilter,
  f: (r: WithdrawalGroupRecord) => Promise<void>,
): Promise<void> {
  const range =
    filter.onlyState === "nonfinal"
      ? GlobalIDB.KeyRange.bound(
          WithdrawalGroupStatus.PendingRegisteringBank,
          WithdrawalGroupStatus.PendingAml,
        )
      : undefined;
  const records: WithdrawalGroupRecord[] =
    await tx.withdrawalGroups.indexes.byStatus.getAll(range);
  for (const rec of records) {
    await f(rec);
  }
}
@ -153,12 +205,7 @@ async function gatherWithdrawalPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const range = GlobalIDB.KeyRange.bound(
WithdrawalGroupStatus.PendingRegisteringBank,
WithdrawalGroupStatus.PendingAml,
);
const wsrs = await tx.withdrawalGroups.indexes.byStatus.getAll(range);
for (const wsr of wsrs) {
await iterRecordsForWithdrawal(tx, { onlyState: "nonfinal" }, async (wsr) => {
const opTag = TaskIdentifiers.forWithdrawal(wsr);
let opr = await tx.operationRetries.get(opTag);
const now = AbsoluteTime.now();
@ -184,6 +231,30 @@ async function gatherWithdrawalPending(
lastError: opr.lastError,
retryInfo: opr.retryInfo,
});
});
}
/**
 * Iterate deposit group records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only records whose status lies between
 * PendingDeposit and PendingKyc are visited (byStatus index).
 */
export async function iterRecordsForDeposit(
  tx: GetReadOnlyAccess<{
    depositGroups: typeof WalletStoresV1.depositGroups;
  }>,
  filter: TransactionRecordFilter,
  f: (r: DepositGroupRecord) => Promise<void>,
): Promise<void> {
  const range =
    filter.onlyState === "nonfinal"
      ? GlobalIDB.KeyRange.bound(
          DepositOperationStatus.PendingDeposit,
          DepositOperationStatus.PendingKyc,
        )
      : undefined;
  const records: DepositGroupRecord[] =
    await tx.depositGroups.indexes.byStatus.getAll(range);
  for (const rec of records) {
    await f(rec);
  }
}
@ -196,16 +267,7 @@ async function gatherDepositPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const dgs = await tx.depositGroups.indexes.byStatus.getAll(
GlobalIDB.KeyRange.bound(
DepositOperationStatus.PendingDeposit,
DepositOperationStatus.PendingKyc,
),
);
for (const dg of dgs) {
if (dg.timestampFinished) {
return;
}
await iterRecordsForDeposit(tx, { onlyState: "nonfinal" }, async (dg) => {
let deposited = true;
for (const d of dg.depositedPerCoin) {
if (!d) {
@ -226,10 +288,28 @@ async function gatherDepositPending(
lastError: retryRecord?.lastError,
retryInfo: retryRecord?.retryInfo,
});
});
}
/**
 * Iterate reward records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only PendingPickup records are
 * visited (byStatus index).
 */
export async function iterRecordsForReward(
  tx: GetReadOnlyAccess<{
    rewards: typeof WalletStoresV1.rewards;
  }>,
  filter: TransactionRecordFilter,
  f: (r: RewardRecord) => Promise<void>,
): Promise<void> {
  const index = tx.rewards.indexes.byStatus;
  if (filter.onlyState !== "nonfinal") {
    await index.iter().forEachAsync(f);
    return;
  }
  // Only PendingPickup counts as non-final for rewards.
  const pending = GlobalIDB.KeyRange.bound(
    RewardRecordStatus.PendingPickup,
    RewardRecordStatus.PendingPickup,
  );
  await index.iter(pending).forEachAsync(f);
}
async function gatherTipPending(
async function gatherRewardPending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
rewards: typeof WalletStoresV1.rewards;
@ -238,15 +318,7 @@ async function gatherTipPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const range = GlobalIDB.KeyRange.bound(
RewardRecordStatus.PendingPickup,
RewardRecordStatus.PendingPickup,
);
await tx.rewards.indexes.byStatus.iter(range).forEachAsync(async (tip) => {
// FIXME: The tip record needs a proper status field!
if (tip.pickedUpTimestamp) {
return;
}
await iterRecordsForReward(tx, { onlyState: "nonfinal" }, async (tip) => {
const opId = TaskIdentifiers.forTipPickup(tip);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
@ -264,6 +336,43 @@ async function gatherTipPending(
});
}
/**
 * Iterate refund group records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only Pending records are visited.
 * Both branches iterate via the byStatus index, consistent with the
 * other iterRecordsFor* helpers (the unfiltered branch previously
 * iterated the store directly, yielding a different record order).
 */
export async function iterRecordsForRefund(
  tx: GetReadOnlyAccess<{
    refundGroups: typeof WalletStoresV1.refundGroups;
  }>,
  filter: TransactionRecordFilter,
  f: (r: RefundGroupRecord) => Promise<void>,
): Promise<void> {
  if (filter.onlyState === "nonfinal") {
    const keyRange = GlobalIDB.KeyRange.only(
      RefundGroupStatus.Pending
    );
    await tx.refundGroups.indexes.byStatus
      .iter(keyRange)
      .forEachAsync(f);
  } else {
    await tx.refundGroups.indexes.byStatus.iter().forEachAsync(f);
  }
}
/**
 * Iterate purchase records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only records whose status lies between
 * PendingDownloadingProposal and PendingAcceptRefund are visited.
 */
export async function iterRecordsForPurchase(
  tx: GetReadOnlyAccess<{
    purchases: typeof WalletStoresV1.purchases;
  }>,
  filter: TransactionRecordFilter,
  f: (r: PurchaseRecord) => Promise<void>,
): Promise<void> {
  const index = tx.purchases.indexes.byStatus;
  if (filter.onlyState !== "nonfinal") {
    await index.iter().forEachAsync(f);
    return;
  }
  const pending = GlobalIDB.KeyRange.bound(
    PurchaseStatus.PendingDownloadingProposal,
    PurchaseStatus.PendingAcceptRefund,
  );
  await index.iter(pending).forEachAsync(f);
}
async function gatherPurchasePending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
@ -273,17 +382,10 @@ async function gatherPurchasePending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound(
PurchaseStatus.PendingDownloadingProposal,
PurchaseStatus.PendingAcceptRefund,
);
await tx.purchases.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pr) => {
await iterRecordsForPurchase(tx, { onlyState: "nonfinal" }, async (pr) => {
const opId = TaskIdentifiers.forPay(pr);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
resp.pendingOperations.push({
type: PendingTaskType.Purchase,
...getPendingCommon(ws, opId, timestampDue),
@ -362,6 +464,26 @@ async function gatherBackupPending(
});
}
/**
 * Iterate peer-pull-credit initiation records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only records whose status lies between
 * PendingCreatePurse and AbortingDeletePurse are visited.
 */
export async function iterRecordsForPeerPullInitiation(
  tx: GetReadOnlyAccess<{
    peerPullPaymentInitiations: typeof WalletStoresV1.peerPullPaymentInitiations;
  }>,
  filter: TransactionRecordFilter,
  f: (r: PeerPullPaymentInitiationRecord) => Promise<void>,
): Promise<void> {
  const index = tx.peerPullPaymentInitiations.indexes.byStatus;
  if (filter.onlyState !== "nonfinal") {
    await index.iter().forEachAsync(f);
    return;
  }
  const pending = GlobalIDB.KeyRange.bound(
    PeerPullPaymentInitiationStatus.PendingCreatePurse,
    PeerPullPaymentInitiationStatus.AbortingDeletePurse,
  );
  await index.iter(pending).forEachAsync(f);
}
async function gatherPeerPullInitiationPending(
ws: InternalWalletState,
tx: GetReadOnlyAccess<{
@ -371,13 +493,10 @@ async function gatherPeerPullInitiationPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound(
PeerPullPaymentInitiationStatus.PendingCreatePurse,
PeerPullPaymentInitiationStatus.AbortingDeletePurse,
);
await tx.peerPullPaymentInitiations.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pi) => {
await iterRecordsForPeerPullInitiation(
tx,
{ onlyState: "nonfinal" },
async (pi) => {
const opId = TaskIdentifiers.forPeerPullPaymentInitiation(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@ -389,7 +508,28 @@ async function gatherPeerPullInitiationPending(
retryInfo: retryRecord?.retryInfo,
pursePub: pi.pursePub,
});
});
},
);
}
/**
 * Iterate peer-pull-debit records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only records whose status lies between
 * PendingDeposit and AbortingRefresh are visited.
 */
export async function iterRecordsForPeerPullDebit(
  tx: GetReadOnlyAccess<{
    peerPullPaymentIncoming: typeof WalletStoresV1.peerPullPaymentIncoming;
  }>,
  filter: TransactionRecordFilter,
  f: (r: PeerPullPaymentIncomingRecord) => Promise<void>,
): Promise<void> {
  const index = tx.peerPullPaymentIncoming.indexes.byStatus;
  if (filter.onlyState !== "nonfinal") {
    await index.iter().forEachAsync(f);
    return;
  }
  const pending = GlobalIDB.KeyRange.bound(
    PeerPullDebitRecordStatus.PendingDeposit,
    PeerPullDebitRecordStatus.AbortingRefresh,
  );
  await index.iter(pending).forEachAsync(f);
}
async function gatherPeerPullDebitPending(
@ -401,13 +541,10 @@ async function gatherPeerPullDebitPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound(
PeerPullDebitRecordStatus.PendingDeposit,
PeerPullDebitRecordStatus.AbortingRefresh,
);
await tx.peerPullPaymentIncoming.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pi) => {
await iterRecordsForPeerPullDebit(
tx,
{ onlyState: "nonfinal" },
async (pi) => {
const opId = TaskIdentifiers.forPeerPullPaymentDebit(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@ -419,7 +556,28 @@ async function gatherPeerPullDebitPending(
retryInfo: retryRecord?.retryInfo,
peerPullPaymentIncomingId: pi.peerPullPaymentIncomingId,
});
});
},
);
}
/**
 * Iterate peer-push-debit initiation records matching the given filter,
 * awaiting f for each record in turn.
 *
 * With onlyState === "nonfinal", only records whose status lies between
 * PendingCreatePurse and AbortingRefresh are visited.
 */
export async function iterRecordsForPeerPushInitiation(
  tx: GetReadOnlyAccess<{
    peerPushPaymentInitiations: typeof WalletStoresV1.peerPushPaymentInitiations;
  }>,
  filter: TransactionRecordFilter,
  f: (r: PeerPushPaymentInitiationRecord) => Promise<void>,
): Promise<void> {
  const index = tx.peerPushPaymentInitiations.indexes.byStatus;
  if (filter.onlyState !== "nonfinal") {
    await index.iter().forEachAsync(f);
    return;
  }
  const pending = GlobalIDB.KeyRange.bound(
    PeerPushPaymentInitiationStatus.PendingCreatePurse,
    PeerPushPaymentInitiationStatus.AbortingRefresh,
  );
  await index.iter(pending).forEachAsync(f);
}
async function gatherPeerPushInitiationPending(
@ -431,13 +589,10 @@ async function gatherPeerPushInitiationPending(
now: AbsoluteTime,
resp: PendingOperationsResponse,
): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound(
PeerPushPaymentInitiationStatus.PendingCreatePurse,
PeerPushPaymentInitiationStatus.AbortingRefresh,
);
await tx.peerPushPaymentInitiations.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pi) => {
await iterRecordsForPeerPushInitiation(
tx,
{ onlyState: "nonfinal" },
async (pi) => {
const opId = TaskIdentifiers.forPeerPushPaymentInitiation(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@ -449,7 +604,28 @@ async function gatherPeerPushInitiationPending(
retryInfo: retryRecord?.retryInfo,
pursePub: pi.pursePub,
});
});
},
);
}
/**
 * Iterate over peer-push credit (incoming push payment) records, applying
 * the callback {@link f} to each matching record.
 *
 * With filter.onlyState === "nonfinal", the byStatus index scan is limited
 * to the status range [PendingMerge, PendingWithdrawing]; otherwise all
 * records are visited.
 */
export async function iterRecordsForPeerPushCredit(
  tx: GetReadOnlyAccess<{
    peerPushPaymentIncoming: typeof WalletStoresV1.peerPushPaymentIncoming;
  }>,
  filter: TransactionRecordFilter,
  f: (r: PeerPushPaymentIncomingRecord) => Promise<void>,
): Promise<void> {
  // Only constrain the index scan when the caller wants non-final records.
  const statusRange =
    filter.onlyState === "nonfinal"
      ? GlobalIDB.KeyRange.bound(
          PeerPushPaymentIncomingStatus.PendingMerge,
          PeerPushPaymentIncomingStatus.PendingWithdrawing,
        )
      : undefined;
  await tx.peerPushPaymentIncoming.indexes.byStatus
    .iter(statusRange)
    .forEachAsync(f);
}
async function gatherPeerPushCreditPending(
@ -465,9 +641,10 @@ async function gatherPeerPushCreditPending(
PeerPushPaymentIncomingStatus.PendingMerge,
PeerPushPaymentIncomingStatus.PendingWithdrawing,
);
await tx.peerPushPaymentIncoming.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pi) => {
await iterRecordsForPeerPushCredit(
tx,
{ onlyState: "nonfinal" },
async (pi) => {
const opId = TaskIdentifiers.forPeerPushCredit(pi);
const retryRecord = await tx.operationRetries.get(opId);
const timestampDue =
@ -479,7 +656,8 @@ async function gatherPeerPushCreditPending(
retryInfo: retryRecord?.retryInfo,
peerPushPaymentIncomingId: pi.peerPushPaymentIncomingId,
});
});
},
);
}
export async function getPendingOperations(
@ -513,7 +691,7 @@ export async function getPendingOperations(
await gatherRefreshPending(ws, tx, now, resp);
await gatherWithdrawalPending(ws, tx, now, resp);
await gatherDepositPending(ws, tx, now, resp);
await gatherTipPending(ws, tx, now, resp);
await gatherRewardPending(ws, tx, now, resp);
await gatherPurchasePending(ws, tx, now, resp);
await gatherRecoupPending(ws, tx, now, resp);
await gatherBackupPending(ws, tx, now, resp);

View File

@ -472,12 +472,15 @@ export async function waitUntilDone(ws: InternalWalletState): Promise<void> {
p = openPromise();
const txs = await getTransactions(ws, {
includeRefreshes: true,
filterByState: "nonfinal",
});
let finished = true;
for (const tx of txs.transactions) {
switch (tx.txState.major) {
case TransactionMajorState.Pending:
case TransactionMajorState.Aborting:
case TransactionMajorState.Suspended:
case TransactionMajorState.SuspendedAborting:
finished = false;
logger.info(
`continuing waiting, ${tx.transactionId} in ${tx.txState.major}(${tx.txState.minor})`,

View File

@ -36,6 +36,7 @@ import {
TransactionByIdRequest,
TransactionIdStr,
TransactionMajorState,
TransactionRecordFilter,
TransactionsRequest,
TransactionsResponse,
TransactionState,
@ -153,6 +154,7 @@ import {
resumePeerPushDebitTransaction,
abortPeerPushDebitTransaction,
} from "./pay-peer-push-debit.js";
import { iterRecordsForDeposit, iterRecordsForPeerPullDebit, iterRecordsForPeerPullInitiation, iterRecordsForPeerPushCredit, iterRecordsForPeerPushInitiation, iterRecordsForPurchase, iterRecordsForRefresh, iterRecordsForRefund, iterRecordsForReward, iterRecordsForWithdrawal } from "./pending.js";
const logger = new Logger("taler-wallet-core:transactions.ts");
@ -929,6 +931,11 @@ export async function getTransactions(
): Promise<TransactionsResponse> {
const transactions: Transaction[] = [];
const filter: TransactionRecordFilter = {};
if (transactionsRequest?.filterByState) {
filter.onlyState = transactionsRequest.filterByState;
}
await ws.db
.mktx((x) => [
x.coins,
@ -952,7 +959,7 @@ export async function getTransactions(
x.refundGroups,
])
.runReadOnly(async (tx) => {
tx.peerPushPaymentInitiations.iter().forEachAsync(async (pi) => {
await iterRecordsForPeerPushInitiation(tx, filter, async (pi) => {
const amount = Amounts.parseOrThrow(pi.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
@ -968,7 +975,7 @@ export async function getTransactions(
);
});
tx.peerPullPaymentIncoming.iter().forEachAsync(async (pi) => {
await iterRecordsForPeerPullDebit(tx, filter, async (pi) => {
const amount = Amounts.parseOrThrow(pi.contractTerms.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
return;
@ -986,7 +993,7 @@ export async function getTransactions(
transactions.push(buildTransactionForPullPaymentDebit(pi));
});
tx.peerPushPaymentIncoming.iter().forEachAsync(async (pi) => {
await iterRecordsForPeerPushCredit(tx, filter, async (pi) => {
if (!pi.currency) {
// Legacy transaction
return;
@ -1027,7 +1034,7 @@ export async function getTransactions(
);
});
tx.peerPullPaymentInitiations.iter().forEachAsync(async (pi) => {
await iterRecordsForPeerPullInitiation(tx, filter, async (pi) => {
const currency = Amounts.currencyOf(pi.amount);
if (shouldSkipCurrency(transactionsRequest, currency)) {
return;
@ -1060,7 +1067,7 @@ export async function getTransactions(
);
});
tx.refundGroups.iter().forEachAsync(async (refundGroup) => {
await iterRecordsForRefund(tx, filter, async (refundGroup) => {
const currency = Amounts.currencyOf(refundGroup.amountRaw);
if (shouldSkipCurrency(transactionsRequest, currency)) {
return;
@ -1072,7 +1079,7 @@ export async function getTransactions(
transactions.push(buildTransactionForRefund(refundGroup, contractData));
});
tx.refreshGroups.iter().forEachAsync(async (rg) => {
await iterRecordsForRefresh(tx, filter, async (rg) => {
if (shouldSkipCurrency(transactionsRequest, rg.currency)) {
return;
}
@ -1092,7 +1099,7 @@ export async function getTransactions(
}
});
tx.withdrawalGroups.iter().forEachAsync(async (wsr) => {
await iterRecordsForWithdrawal(tx, filter ,async (wsr) => {
if (
shouldSkipCurrency(
transactionsRequest,
@ -1146,7 +1153,7 @@ export async function getTransactions(
}
});
tx.depositGroups.iter().forEachAsync(async (dg) => {
await iterRecordsForDeposit(tx, filter, async (dg) => {
const amount = Amounts.parseOrThrow(dg.contractTermsRaw.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
return;
@ -1157,7 +1164,7 @@ export async function getTransactions(
transactions.push(buildTransactionForDeposit(dg, retryRecord));
});
tx.purchases.iter().forEachAsync(async (purchase) => {
await iterRecordsForPurchase(tx, filter, async (purchase) => {
const download = purchase.download;
if (!download) {
return;
@ -1200,7 +1207,7 @@ export async function getTransactions(
);
});
tx.rewards.iter().forEachAsync(async (tipRecord) => {
await iterRecordsForReward(tx, filter, async (tipRecord) => {
if (
shouldSkipCurrency(
transactionsRequest,

View File

@ -338,7 +338,7 @@ interface IndexReadOnlyAccessor<RecordType> {
iter(query?: IDBKeyRange | IDBValidKey): ResultStream<RecordType>;
get(query: IDBValidKey): Promise<RecordType | undefined>;
getAll(
query: IDBKeyRange | IDBValidKey,
query?: IDBKeyRange | IDBValidKey,
count?: number,
): Promise<RecordType[]>;
}
@ -351,7 +351,7 @@ interface IndexReadWriteAccessor<RecordType> {
iter(query: IDBKeyRange | IDBValidKey): ResultStream<RecordType>;
get(query: IDBValidKey): Promise<RecordType | undefined>;
getAll(
query: IDBKeyRange | IDBValidKey,
query?: IDBKeyRange | IDBValidKey,
count?: number,
): Promise<RecordType[]>;
}

View File

@ -479,6 +479,7 @@ async function runTaskLoop(
// Wait until either the timeout, or we are notified (via the latch)
// that more work might be available.
await Promise.race([timeout, ws.workAvailable.wait()]);
logger.trace(`done waiting for available work`);
} else {
logger.trace(
`running ${pending.pendingOperations.length} pending operations`,

View File

@ -51,7 +51,7 @@ export const buildConfig = {
target: [
'es2020'
],
external: ["os", "std"],
external: ["os", "std", "better-sqlite3"],
format: 'esm',
platform: 'neutral',
mainFields: ["module", "main"],

View File

@ -27,9 +27,9 @@ import {
CoreApiResponseSuccess,
getErrorDetailFromException,
InitRequest,
j2s,
Logger,
setGlobalLogLevelFromString,
setPRNG,
WalletNotification,
} from "@gnu-taler/taler-util";
import { createPlatformHttpLib } from "@gnu-taler/taler-util/http";
@ -47,20 +47,11 @@ import {
getRecoveryStartState,
discoverPolicies,
mergeDiscoveryAggregate,
ReducerState,
} from "@gnu-taler/anastasis-core";
import { userIdentifierDerive } from "@gnu-taler/anastasis-core/lib/crypto.js";
setGlobalLogLevelFromString("trace");
setPRNG(function (x: Uint8Array, n: number) {
// @ts-ignore
const va = globalThis._tart.randomBytes(n);
const v = new Uint8Array(va);
for (let i = 0; i < n; i++) x[i] = v[i];
for (let i = 0; i < v.length; i++) v[i] = 0;
});
const logger = new Logger("taler-wallet-embedded/index.ts");
/**
@ -222,6 +213,8 @@ async function handleAnastasisRequest(
cursor: discoverRes.cursor,
},
});
default:
throw Error("unsupported anastasis operation");
}
}
@ -295,10 +288,10 @@ export async function testWithGv() {
});
}
export async function testWithLocal() {
export async function testWithLocal(path: string) {
console.log("running local test");
const w = await createNativeWalletHost2({
persistentStoragePath: "walletdb.json",
persistentStoragePath: path ?? "walletdb.json",
config: {
features: {
allowHttp: true,
@ -310,7 +303,7 @@ export async function testWithLocal() {
skipDefaults: true,
});
console.log("initialized wallet");
await w.wallet.client.call(WalletApiOperation.RunIntegrationTestV2, {
await w.wallet.client.call(WalletApiOperation.RunIntegrationTest, {
amountToSpend: "TESTKUDOS:1",
amountToWithdraw: "TESTKUDOS:3",
bankAccessApiBaseUrl: "http://localhost:8082/taler-bank-access/",
@ -323,6 +316,7 @@ export async function testWithLocal() {
});
console.log("done with task loop");
w.wallet.stop();
console.log("DB stats:", j2s(w.getDbStats()));
}
export async function testArgon2id() {

File diff suppressed because it is too large Load Diff