Merge branch 'master' into age-withdraw

This commit is contained in:
Özgür Kesim 2023-08-25 13:24:08 +02:00
commit 5ab3070b3a
Signed by: oec
GPG Key ID: 3D76A56D79EDD9D7
91 changed files with 5858 additions and 1671 deletions

View File

@ -18,22 +18,26 @@
"exports": { "exports": {
".": { ".": {
"default": "./lib/index.js" "default": "./lib/index.js"
},
"./node-sqlite3-bindings": {
"default": "./lib/node-sqlite3-impl.js"
} }
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^18.11.17", "@types/better-sqlite3": "^7.6.4",
"ava": "^4.3.3", "@types/node": "^20.4.1",
"esm": "^3.2.25", "ava": "^5.3.1",
"prettier": "^2.8.8", "prettier": "^2.8.8",
"rimraf": "^3.0.2", "rimraf": "^5.0.1",
"typescript": "^5.1.3" "typescript": "^5.1.6"
}, },
"dependencies": { "dependencies": {
"tslib": "^2.5.3" "tslib": "^2.6.0"
}, },
"ava": { "ava": {
"require": [ "failFast": true
"esm" },
] "optionalDependencies": {
"better-sqlite3": "^8.4.0"
} }
} }

View File

@ -15,334 +15,9 @@
*/ */
import test from "ava"; import test from "ava";
import {
BridgeIDBCursorWithValue,
BridgeIDBDatabase,
BridgeIDBFactory,
BridgeIDBKeyRange,
BridgeIDBRequest,
BridgeIDBTransaction,
} from "./bridge-idb.js";
import {
IDBCursorDirection,
IDBCursorWithValue,
IDBDatabase,
IDBKeyRange,
IDBValidKey,
} from "./idbtypes.js";
import { MemoryBackend } from "./MemoryBackend.js"; import { MemoryBackend } from "./MemoryBackend.js";
import { BridgeIDBDatabase, BridgeIDBFactory } from "./bridge-idb.js";
function promiseFromRequest(request: BridgeIDBRequest): Promise<any> { import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js";
return new Promise((resolve, reject) => {
request.onsuccess = () => {
resolve(request.result);
};
request.onerror = () => {
reject(request.error);
};
});
}
function promiseFromTransaction(
transaction: BridgeIDBTransaction,
): Promise<void> {
return new Promise<void>((resolve, reject) => {
transaction.oncomplete = () => {
resolve();
};
transaction.onerror = () => {
reject();
};
});
}
test("Spec: Example 1 Part 1", async (t) => {
const backend = new MemoryBackend();
const idb = new BridgeIDBFactory(backend);
const request = idb.open("library");
request.onupgradeneeded = () => {
const db = request.result;
const store = db.createObjectStore("books", { keyPath: "isbn" });
const titleIndex = store.createIndex("by_title", "title", { unique: true });
const authorIndex = store.createIndex("by_author", "author");
// Populate with initial data.
store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
};
await promiseFromRequest(request);
t.pass();
});
test("Spec: Example 1 Part 2", async (t) => {
const backend = new MemoryBackend();
const idb = new BridgeIDBFactory(backend);
const request = idb.open("library");
request.onupgradeneeded = () => {
const db = request.result;
const store = db.createObjectStore("books", { keyPath: "isbn" });
const titleIndex = store.createIndex("by_title", "title", { unique: true });
const authorIndex = store.createIndex("by_author", "author");
};
const db: BridgeIDBDatabase = await promiseFromRequest(request);
t.is(db.name, "library");
const tx = db.transaction("books", "readwrite");
tx.oncomplete = () => {
console.log("oncomplete called");
};
const store = tx.objectStore("books");
store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
await promiseFromTransaction(tx);
t.pass();
});
test("Spec: Example 1 Part 3", async (t) => {
const backend = new MemoryBackend();
backend.enableTracing = true;
const idb = new BridgeIDBFactory(backend);
const request = idb.open("library");
request.onupgradeneeded = () => {
const db = request.result;
const store = db.createObjectStore("books", { keyPath: "isbn" });
const titleIndex = store.createIndex("by_title", "title", { unique: true });
const authorIndex = store.createIndex("by_author", "author");
};
const db: BridgeIDBDatabase = await promiseFromRequest(request);
t.is(db.name, "library");
const tx = db.transaction("books", "readwrite");
const store = tx.objectStore("books");
store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
await promiseFromTransaction(tx);
const tx2 = db.transaction("books", "readonly");
const store2 = tx2.objectStore("books");
var index2 = store2.index("by_title");
const request2 = index2.get("Bedrock Nights");
const result2: any = await promiseFromRequest(request2);
t.is(result2.author, "Barney");
const tx3 = db.transaction(["books"], "readonly");
const store3 = tx3.objectStore("books");
const index3 = store3.index("by_author");
const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
await promiseFromRequest(request3);
let cursor: BridgeIDBCursorWithValue | null;
cursor = request3.result as BridgeIDBCursorWithValue;
t.is(cursor.value.author, "Fred");
t.is(cursor.value.isbn, 123456);
cursor.continue();
await promiseFromRequest(request3);
cursor = request3.result as BridgeIDBCursorWithValue;
t.is(cursor.value.author, "Fred");
t.is(cursor.value.isbn, 234567);
await promiseFromTransaction(tx3);
const tx4 = db.transaction("books", "readonly");
const store4 = tx4.objectStore("books");
const request4 = store4.openCursor();
await promiseFromRequest(request4);
cursor = request4.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.isbn, 123456);
cursor.continue();
await promiseFromRequest(request4);
cursor = request4.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.isbn, 234567);
cursor.continue();
await promiseFromRequest(request4);
cursor = request4.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.isbn, 345678);
cursor.continue();
await promiseFromRequest(request4);
cursor = request4.result;
t.is(cursor, null);
const tx5 = db.transaction("books", "readonly");
const store5 = tx5.objectStore("books");
const index5 = store5.index("by_author");
const request5 = index5.openCursor(null, "next");
await promiseFromRequest(request5);
cursor = request5.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.author, "Barney");
cursor.continue();
await promiseFromRequest(request5);
cursor = request5.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.author, "Fred");
cursor.continue();
await promiseFromRequest(request5);
cursor = request5.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.author, "Fred");
cursor.continue();
await promiseFromRequest(request5);
cursor = request5.result;
t.is(cursor, null);
const request6 = index5.openCursor(null, "nextunique");
await promiseFromRequest(request6);
cursor = request6.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.author, "Barney");
cursor.continue();
await promiseFromRequest(request6);
cursor = request6.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.author, "Fred");
t.is(cursor.value.isbn, 123456);
cursor.continue();
await promiseFromRequest(request6);
cursor = request6.result;
t.is(cursor, null);
const request7 = index5.openCursor(null, "prevunique");
await promiseFromRequest(request7);
cursor = request7.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.author, "Fred");
t.is(cursor.value.isbn, 123456);
cursor.continue();
await promiseFromRequest(request7);
cursor = request7.result;
if (!cursor) {
throw new Error();
}
t.is(cursor.value.author, "Barney");
cursor.continue();
await promiseFromRequest(request7);
cursor = request7.result;
t.is(cursor, null);
db.close();
t.pass();
});
test("simple deletion", async (t) => {
const backend = new MemoryBackend();
const idb = new BridgeIDBFactory(backend);
const request = idb.open("library");
request.onupgradeneeded = () => {
const db = request.result;
const store = db.createObjectStore("books", { keyPath: "isbn" });
const titleIndex = store.createIndex("by_title", "title", { unique: true });
const authorIndex = store.createIndex("by_author", "author");
};
const db: BridgeIDBDatabase = await promiseFromRequest(request);
t.is(db.name, "library");
const tx = db.transaction("books", "readwrite");
tx.oncomplete = () => {
console.log("oncomplete called");
};
const store = tx.objectStore("books");
store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
await promiseFromTransaction(tx);
const tx2 = db.transaction("books", "readwrite");
const store2 = tx2.objectStore("books");
const req1 = store2.get(234567);
await promiseFromRequest(req1);
t.is(req1.readyState, "done");
t.is(req1.result.author, "Fred");
store2.delete(123456);
const req2 = store2.get(123456);
await promiseFromRequest(req2);
t.is(req2.readyState, "done");
t.is(req2.result, undefined);
const req3 = store2.get(234567);
await promiseFromRequest(req3);
t.is(req3.readyState, "done");
t.is(req3.result.author, "Fred");
await promiseFromTransaction(tx2);
t.pass();
});
test("export", async (t) => { test("export", async (t) => {
const backend = new MemoryBackend(); const backend = new MemoryBackend();
@ -386,276 +61,3 @@ test("export", async (t) => {
t.is(exportedData2.databases["library"].schema.databaseVersion, 42); t.is(exportedData2.databases["library"].schema.databaseVersion, 42);
t.pass(); t.pass();
}); });
test("update with non-existent index values", async (t) => {
const backend = new MemoryBackend();
backend.enableTracing = true;
const idb = new BridgeIDBFactory(backend);
const request = idb.open("mydb");
request.onupgradeneeded = () => {
const db = request.result;
const store = db.createObjectStore("bla", { keyPath: "x" });
store.createIndex("by_y", "y");
store.createIndex("by_z", "z");
};
const db: BridgeIDBDatabase = await promiseFromRequest(request);
t.is(db.name, "mydb");
{
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
store.put({ x: 0, y: "a", z: 42 });
const index = store.index("by_z");
const indRes = await promiseFromRequest(index.get(42));
t.is(indRes.x, 0);
const res = await promiseFromRequest(store.get(0));
t.is(res.z, 42);
await promiseFromTransaction(tx);
}
{
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
store.put({ x: 0, y: "a" });
const res = await promiseFromRequest(store.get(0));
t.is(res.z, undefined);
await promiseFromTransaction(tx);
}
{
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
const index = store.index("by_z");
{
const indRes = await promiseFromRequest(index.get(42));
t.is(indRes, undefined);
}
const res = await promiseFromRequest(store.get(0));
t.is(res.z, undefined);
await promiseFromTransaction(tx);
}
t.pass();
});
test("delete from unique index", async (t) => {
const backend = new MemoryBackend();
backend.enableTracing = true;
const idb = new BridgeIDBFactory(backend);
const request = idb.open("mydb");
request.onupgradeneeded = () => {
const db = request.result as IDBDatabase;
const store = db.createObjectStore("bla", { keyPath: "x" });
store.createIndex("by_yz", ["y", "z"], {
unique: true,
});
};
const db: BridgeIDBDatabase = await promiseFromRequest(request);
t.is(db.name, "mydb");
{
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
store.put({ x: 0, y: "a", z: 42 });
const index = store.index("by_yz");
const indRes = await promiseFromRequest(index.get(["a", 42]));
t.is(indRes.x, 0);
const res = await promiseFromRequest(store.get(0));
t.is(res.z, 42);
await promiseFromTransaction(tx);
}
{
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
store.put({ x: 0, y: "a", z: 42, extra: 123 });
await promiseFromTransaction(tx);
}
t.pass();
});
test("range queries", async (t) => {
const backend = new MemoryBackend();
backend.enableTracing = true;
const idb = new BridgeIDBFactory(backend);
const request = idb.open("mydb");
request.onupgradeneeded = () => {
const db = request.result;
const store = db.createObjectStore("bla", { keyPath: "x" });
store.createIndex("by_y", "y");
store.createIndex("by_z", "z");
};
const db: BridgeIDBDatabase = await promiseFromRequest(request);
t.is(db.name, "mydb");
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
store.put({ x: 0, y: "a" });
store.put({ x: 2, y: "a" });
store.put({ x: 4, y: "b" });
store.put({ x: 8, y: "b" });
store.put({ x: 10, y: "c" });
store.put({ x: 12, y: "c" });
await promiseFromTransaction(tx);
async function doCursorStoreQuery(
range: IDBKeyRange | IDBValidKey | undefined,
direction: IDBCursorDirection | undefined,
expected: any[],
): Promise<void> {
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
const vals: any[] = [];
const req = store.openCursor(range, direction);
while (1) {
await promiseFromRequest(req);
const cursor: IDBCursorWithValue = req.result;
if (!cursor) {
break;
}
cursor.continue();
vals.push(cursor.value);
}
await promiseFromTransaction(tx);
t.deepEqual(vals, expected);
}
async function doCursorIndexQuery(
range: IDBKeyRange | IDBValidKey | undefined,
direction: IDBCursorDirection | undefined,
expected: any[],
): Promise<void> {
const tx = db.transaction("bla", "readwrite");
const store = tx.objectStore("bla");
const index = store.index("by_y");
const vals: any[] = [];
const req = index.openCursor(range, direction);
while (1) {
await promiseFromRequest(req);
const cursor: IDBCursorWithValue = req.result;
if (!cursor) {
break;
}
cursor.continue();
vals.push(cursor.value);
}
await promiseFromTransaction(tx);
t.deepEqual(vals, expected);
}
await doCursorStoreQuery(undefined, undefined, [
{
x: 0,
y: "a",
},
{
x: 2,
y: "a",
},
{
x: 4,
y: "b",
},
{
x: 8,
y: "b",
},
{
x: 10,
y: "c",
},
{
x: 12,
y: "c",
},
]);
await doCursorStoreQuery(
BridgeIDBKeyRange.bound(0, 12, true, true),
undefined,
[
{
x: 2,
y: "a",
},
{
x: 4,
y: "b",
},
{
x: 8,
y: "b",
},
{
x: 10,
y: "c",
},
],
);
await doCursorIndexQuery(
BridgeIDBKeyRange.bound("a", "c", true, true),
undefined,
[
{
x: 4,
y: "b",
},
{
x: 8,
y: "b",
},
],
);
await doCursorIndexQuery(undefined, "nextunique", [
{
x: 0,
y: "a",
},
{
x: 4,
y: "b",
},
{
x: 10,
y: "c",
},
]);
await doCursorIndexQuery(undefined, "prevunique", [
{
x: 10,
y: "c",
},
{
x: 4,
y: "b",
},
{
x: 0,
y: "a",
},
]);
db.close();
t.pass();
});

View File

@ -14,43 +14,38 @@
permissions and limitations under the License. permissions and limitations under the License.
*/ */
import { AsyncCondition, TransactionLevel } from "./backend-common.js";
import { import {
Backend, Backend,
ConnectResult,
DatabaseConnection, DatabaseConnection,
DatabaseTransaction, DatabaseTransaction,
Schema, IndexGetQuery,
RecordStoreRequest, IndexMeta,
IndexProperties, ObjectStoreGetQuery,
RecordGetRequest, ObjectStoreMeta,
RecordGetResponse, RecordGetResponse,
RecordStoreRequest,
RecordStoreResponse,
ResultLevel, ResultLevel,
StoreLevel, StoreLevel,
RecordStoreResponse,
} from "./backend-interface.js"; } from "./backend-interface.js";
import { BridgeIDBKeyRange } from "./bridge-idb.js";
import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js";
import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js";
import { compareKeys } from "./util/cmp.js";
import { ConstraintError, DataError } from "./util/errors.js";
import { getIndexKeys } from "./util/getIndexKeys.js";
import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
import { import {
structuredClone, structuredClone,
structuredEncapsulate, structuredEncapsulate,
structuredRevive, structuredRevive,
} from "./util/structuredClone.js"; } from "./util/structuredClone.js";
import { ConstraintError, DataError } from "./util/errors.js";
import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree.js";
import { compareKeys } from "./util/cmp.js";
import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue.js";
import { getIndexKeys } from "./util/getIndexKeys.js";
import { openPromise } from "./util/openPromise.js";
import { IDBKeyRange, IDBTransactionMode, IDBValidKey } from "./idbtypes.js";
import { BridgeIDBKeyRange } from "./bridge-idb.js";
type Key = IDBValidKey; type Key = IDBValidKey;
type Value = unknown; type Value = unknown;
enum TransactionLevel {
None = 0,
Read = 1,
Write = 2,
VersionChange = 3,
}
interface ObjectStore { interface ObjectStore {
originalName: string; originalName: string;
modifiedName: string | undefined; modifiedName: string | undefined;
@ -95,24 +90,39 @@ interface Database {
connectionCookies: string[]; connectionCookies: string[];
} }
/** @public */
export interface ObjectStoreDump { export interface ObjectStoreDump {
name: string; name: string;
keyGenerator: number; keyGenerator: number;
records: ObjectStoreRecord[]; records: ObjectStoreRecord[];
} }
/** @public */
export interface DatabaseDump { export interface DatabaseDump {
schema: Schema; schema: Schema;
objectStores: { [name: string]: ObjectStoreDump }; objectStores: { [name: string]: ObjectStoreDump };
} }
/** @public */
export interface MemoryBackendDump { export interface MemoryBackendDump {
databases: { [name: string]: DatabaseDump }; databases: { [name: string]: DatabaseDump };
} }
export interface ObjectStoreProperties {
keyPath: string | string[] | null;
autoIncrement: boolean;
indexes: { [nameame: string]: IndexProperties };
}
export interface IndexProperties {
keyPath: string | string[];
multiEntry: boolean;
unique: boolean;
}
export interface Schema {
databaseName: string;
databaseVersion: number;
objectStores: { [name: string]: ObjectStoreProperties };
}
interface ObjectStoreMapEntry { interface ObjectStoreMapEntry {
store: ObjectStore; store: ObjectStore;
indexMap: { [currentName: string]: Index }; indexMap: { [currentName: string]: Index };
@ -142,27 +152,6 @@ export interface ObjectStoreRecord {
value: Value; value: Value;
} }
class AsyncCondition {
_waitPromise: Promise<void>;
_resolveWaitPromise: () => void;
constructor() {
const op = openPromise<void>();
this._waitPromise = op.promise;
this._resolveWaitPromise = op.resolve;
}
wait(): Promise<void> {
return this._waitPromise;
}
trigger(): void {
this._resolveWaitPromise();
const op = openPromise<void>();
this._waitPromise = op.promise;
this._resolveWaitPromise = op.resolve;
}
}
function nextStoreKey<T>( function nextStoreKey<T>(
forward: boolean, forward: boolean,
data: ISortedMapF<Key, ObjectStoreRecord>, data: ISortedMapF<Key, ObjectStoreRecord>,
@ -178,12 +167,6 @@ function nextStoreKey<T>(
return res[1].primaryKey; return res[1].primaryKey;
} }
function assertInvariant(cond: boolean): asserts cond {
if (!cond) {
throw Error("invariant failed");
}
}
function nextKey( function nextKey(
forward: boolean, forward: boolean,
tree: ISortedSetF<IDBValidKey>, tree: ISortedSetF<IDBValidKey>,
@ -230,6 +213,7 @@ function furthestKey(
} }
export interface AccessStats { export interface AccessStats {
primitiveStatements: number;
writeTransactions: number; writeTransactions: number;
readTransactions: number; readTransactions: number;
writesPerStore: Record<string, number>; writesPerStore: Record<string, number>;
@ -279,6 +263,7 @@ export class MemoryBackend implements Backend {
trackStats: boolean = true; trackStats: boolean = true;
accessStats: AccessStats = { accessStats: AccessStats = {
primitiveStatements: 0,
readTransactions: 0, readTransactions: 0,
writeTransactions: 0, writeTransactions: 0,
readsPerStore: {}, readsPerStore: {},
@ -459,7 +444,7 @@ export class MemoryBackend implements Backend {
delete this.databases[name]; delete this.databases[name];
} }
async connectDatabase(name: string): Promise<DatabaseConnection> { async connectDatabase(name: string): Promise<ConnectResult> {
if (this.enableTracing) { if (this.enableTracing) {
console.log(`TRACING: connectDatabase(${name})`); console.log(`TRACING: connectDatabase(${name})`);
} }
@ -498,7 +483,11 @@ export class MemoryBackend implements Backend {
this.connections[connectionCookie] = myConn; this.connections[connectionCookie] = myConn;
return { connectionCookie }; return {
conn: { connectionCookie },
version: database.committedSchema.databaseVersion,
objectStores: Object.keys(database.committedSchema.objectStores).sort(),
};
} }
async beginTransaction( async beginTransaction(
@ -601,14 +590,6 @@ export class MemoryBackend implements Backend {
this.disconnectCond.trigger(); this.disconnectCond.trigger();
} }
private requireConnection(dbConn: DatabaseConnection): Connection {
const myConn = this.connections[dbConn.connectionCookie];
if (!myConn) {
throw Error(`unknown connection (${dbConn.connectionCookie})`);
}
return myConn;
}
private requireConnectionFromTransaction( private requireConnectionFromTransaction(
btx: DatabaseTransaction, btx: DatabaseTransaction,
): Connection { ): Connection {
@ -619,36 +600,6 @@ export class MemoryBackend implements Backend {
return myConn; return myConn;
} }
getSchema(dbConn: DatabaseConnection): Schema {
if (this.enableTracing) {
console.log(`TRACING: getSchema`);
}
const myConn = this.requireConnection(dbConn);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
return db.committedSchema;
}
getCurrentTransactionSchema(btx: DatabaseTransaction): Schema {
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
return myConn.modifiedSchema;
}
getInitialTransactionSchema(btx: DatabaseTransaction): Schema {
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
return db.committedSchema;
}
renameIndex( renameIndex(
btx: DatabaseTransaction, btx: DatabaseTransaction,
objectStoreName: string, objectStoreName: string,
@ -799,7 +750,7 @@ export class MemoryBackend implements Backend {
createObjectStore( createObjectStore(
btx: DatabaseTransaction, btx: DatabaseTransaction,
name: string, name: string,
keyPath: string[] | null, keyPath: string | string[] | null,
autoIncrement: boolean, autoIncrement: boolean,
): void { ): void {
if (this.enableTracing) { if (this.enableTracing) {
@ -842,7 +793,7 @@ export class MemoryBackend implements Backend {
btx: DatabaseTransaction, btx: DatabaseTransaction,
indexName: string, indexName: string,
objectStoreName: string, objectStoreName: string,
keyPath: string[], keyPath: string | string[],
multiEntry: boolean, multiEntry: boolean,
unique: boolean, unique: boolean,
): void { ): void {
@ -1102,12 +1053,91 @@ export class MemoryBackend implements Backend {
} }
} }
async getRecords( async getObjectStoreRecords(
btx: DatabaseTransaction, btx: DatabaseTransaction,
req: RecordGetRequest, req: ObjectStoreGetQuery,
): Promise<RecordGetResponse> { ): Promise<RecordGetResponse> {
if (this.enableTracing) { if (this.enableTracing) {
console.log(`TRACING: getRecords`); console.log(`TRACING: getObjectStoreRecords`);
console.log("query", req);
}
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
if (!db) {
throw Error("db not found");
}
if (db.txLevel < TransactionLevel.Read) {
throw Error("only allowed while running a transaction");
}
if (
db.txRestrictObjectStores &&
!db.txRestrictObjectStores.includes(req.objectStoreName)
) {
throw Error(
`Not allowed to access store '${
req.objectStoreName
}', transaction is over ${JSON.stringify(db.txRestrictObjectStores)}`,
);
}
const objectStoreMapEntry = myConn.objectStoreMap[req.objectStoreName];
if (!objectStoreMapEntry) {
throw Error("object store not found");
}
let range;
if (req.range == null) {
range = new BridgeIDBKeyRange(undefined, undefined, true, true);
} else {
range = req.range;
}
if (typeof range !== "object") {
throw Error(
"getObjectStoreRecords was given an invalid range (sanity check failed, not an object)",
);
}
if (!("lowerOpen" in range)) {
throw Error(
"getObjectStoreRecords was given an invalid range (sanity check failed, lowerOpen missing)",
);
}
const forward: boolean =
req.direction === "next" || req.direction === "nextunique";
const storeData =
objectStoreMapEntry.store.modifiedData ||
objectStoreMapEntry.store.originalData;
const resp = getObjectStoreRecords({
forward,
storeData,
limit: req.limit,
range,
resultLevel: req.resultLevel,
advancePrimaryKey: req.advancePrimaryKey,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}`;
this.accessStats.readsPerStore[k] =
(this.accessStats.readsPerStore[k] ?? 0) + 1;
this.accessStats.readItemsPerStore[k] =
(this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
}
if (this.enableTracing) {
console.log(`TRACING: getRecords got ${resp.count} results`);
}
return resp;
}
async getIndexRecords(
btx: DatabaseTransaction,
req: IndexGetQuery,
): Promise<RecordGetResponse> {
if (this.enableTracing) {
console.log(`TRACING: getIndexRecords`);
console.log("query", req); console.log("query", req);
} }
const myConn = this.requireConnectionFromTransaction(btx); const myConn = this.requireConnectionFromTransaction(btx);
@ -1161,58 +1191,31 @@ export class MemoryBackend implements Backend {
objectStoreMapEntry.store.modifiedData || objectStoreMapEntry.store.modifiedData ||
objectStoreMapEntry.store.originalData; objectStoreMapEntry.store.originalData;
const haveIndex = req.indexName !== undefined; const index =
myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!];
let resp: RecordGetResponse; const indexData = index.modifiedData || index.originalData;
const resp = getIndexRecords({
if (haveIndex) { forward,
const index = indexData,
myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!]; storeData,
const indexData = index.modifiedData || index.originalData; limit: req.limit,
resp = getIndexRecords({ unique,
forward, range,
indexData, resultLevel: req.resultLevel,
storeData, advanceIndexKey: req.advanceIndexKey,
limit: req.limit, advancePrimaryKey: req.advancePrimaryKey,
unique, lastIndexPosition: req.lastIndexPosition,
range, lastObjectStorePosition: req.lastObjectStorePosition,
resultLevel: req.resultLevel, });
advanceIndexKey: req.advanceIndexKey, if (this.trackStats) {
advancePrimaryKey: req.advancePrimaryKey, const k = `${req.objectStoreName}.${req.indexName}`;
lastIndexPosition: req.lastIndexPosition, this.accessStats.readsPerIndex[k] =
lastObjectStorePosition: req.lastObjectStorePosition, (this.accessStats.readsPerIndex[k] ?? 0) + 1;
}); this.accessStats.readItemsPerIndex[k] =
if (this.trackStats) { (this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count;
const k = `${req.objectStoreName}.${req.indexName}`;
this.accessStats.readsPerIndex[k] =
(this.accessStats.readsPerIndex[k] ?? 0) + 1;
this.accessStats.readItemsPerIndex[k] =
(this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count;
}
} else {
if (req.advanceIndexKey !== undefined) {
throw Error("unsupported request");
}
resp = getObjectStoreRecords({
forward,
storeData,
limit: req.limit,
range,
resultLevel: req.resultLevel,
advancePrimaryKey: req.advancePrimaryKey,
lastIndexPosition: req.lastIndexPosition,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}`;
this.accessStats.readsPerStore[k] =
(this.accessStats.readsPerStore[k] ?? 0) + 1;
this.accessStats.readItemsPerStore[k] =
(this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
}
} }
if (this.enableTracing) { if (this.enableTracing) {
console.log(`TRACING: getRecords got ${resp.count} results`); console.log(`TRACING: getIndexRecords got ${resp.count} results`);
} }
return resp; return resp;
} }
@ -1294,13 +1297,13 @@ export class MemoryBackend implements Backend {
let storeKeyResult: StoreKeyResult; let storeKeyResult: StoreKeyResult;
try { try {
storeKeyResult = makeStoreKeyValue( storeKeyResult = makeStoreKeyValue({
storeReq.value, value: storeReq.value,
storeReq.key, key: storeReq.key,
keygen, currentKeyGenerator: keygen,
autoIncrement, autoIncrement: autoIncrement,
keyPath, keyPath: keyPath,
); });
} catch (e) { } catch (e) {
if (e instanceof DataError) { if (e instanceof DataError) {
const kp = JSON.stringify(keyPath); const kp = JSON.stringify(keyPath);
@ -1445,7 +1448,7 @@ export class MemoryBackend implements Backend {
} }
} }
async rollback(btx: DatabaseTransaction): Promise<void> { rollback(btx: DatabaseTransaction): void {
if (this.enableTracing) { if (this.enableTracing) {
console.log(`TRACING: rollback`); console.log(`TRACING: rollback`);
} }
@ -1536,6 +1539,57 @@ export class MemoryBackend implements Backend {
await this.afterCommitCallback(); await this.afterCommitCallback();
} }
} }
getObjectStoreMeta(
dbConn: DatabaseConnection,
objectStoreName: string,
): ObjectStoreMeta | undefined {
const conn = this.connections[dbConn.connectionCookie];
if (!conn) {
throw Error("db connection not found");
}
let schema = conn.modifiedSchema;
if (!schema) {
throw Error();
}
const storeInfo = schema.objectStores[objectStoreName];
if (!storeInfo) {
return undefined;
}
return {
autoIncrement: storeInfo.autoIncrement,
indexSet: Object.keys(storeInfo.indexes).sort(),
keyPath: structuredClone(storeInfo.keyPath),
};
}
getIndexMeta(
dbConn: DatabaseConnection,
objectStoreName: string,
indexName: string,
): IndexMeta | undefined {
const conn = this.connections[dbConn.connectionCookie];
if (!conn) {
throw Error("db connection not found");
}
let schema = conn.modifiedSchema;
if (!schema) {
throw Error();
}
const storeInfo = schema.objectStores[objectStoreName];
if (!storeInfo) {
return undefined;
}
const indexInfo = storeInfo.indexes[indexName];
if (!indexInfo) {
return;
}
return {
keyPath: structuredClone(indexInfo.keyPath),
multiEntry: indexInfo.multiEntry,
unique: indexInfo.unique,
};
}
} }
function getIndexRecords(req: { function getIndexRecords(req: {
@ -1734,7 +1788,6 @@ function getIndexRecords(req: {
function getObjectStoreRecords(req: { function getObjectStoreRecords(req: {
storeData: ISortedMapF<IDBValidKey, ObjectStoreRecord>; storeData: ISortedMapF<IDBValidKey, ObjectStoreRecord>;
lastIndexPosition?: IDBValidKey;
forward: boolean; forward: boolean;
range: IDBKeyRange; range: IDBKeyRange;
lastObjectStorePosition?: IDBValidKey; lastObjectStorePosition?: IDBValidKey;
@ -1743,7 +1796,6 @@ function getObjectStoreRecords(req: {
resultLevel: ResultLevel; resultLevel: ResultLevel;
}): RecordGetResponse { }): RecordGetResponse {
let numResults = 0; let numResults = 0;
const indexKeys: Key[] = [];
const primaryKeys: Key[] = []; const primaryKeys: Key[] = [];
const values: Value[] = []; const values: Value[] = [];
const { storeData, range, forward } = req; const { storeData, range, forward } = req;
@ -1751,8 +1803,7 @@ function getObjectStoreRecords(req: {
function packResult(): RecordGetResponse { function packResult(): RecordGetResponse {
return { return {
count: numResults, count: numResults,
indexKeys: indexKeys: undefined,
req.resultLevel >= ResultLevel.OnlyKeys ? indexKeys : undefined,
primaryKeys: primaryKeys:
req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined, req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
values: req.resultLevel >= ResultLevel.Full ? values : undefined, values: req.resultLevel >= ResultLevel.Full ? values : undefined,
@ -1762,8 +1813,8 @@ function getObjectStoreRecords(req: {
const rangeStart = forward ? range.lower : range.upper; const rangeStart = forward ? range.lower : range.upper;
const dataStart = forward ? storeData.minKey() : storeData.maxKey(); const dataStart = forward ? storeData.minKey() : storeData.maxKey();
let storePos = req.lastObjectStorePosition; let storePos = req.lastObjectStorePosition;
storePos = furthestKey(forward, storePos, rangeStart);
storePos = furthestKey(forward, storePos, dataStart); storePos = furthestKey(forward, storePos, dataStart);
storePos = furthestKey(forward, storePos, rangeStart);
storePos = furthestKey(forward, storePos, req.advancePrimaryKey); storePos = furthestKey(forward, storePos, req.advancePrimaryKey);
if (storePos != null) { if (storePos != null) {

View File

@ -0,0 +1,83 @@
/*
Copyright 2019 Florian Dold
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
import test from "ava";
import { createSqliteBackend } from "./SqliteBackend.js";
import { ResultLevel, StoreLevel } from "./backend-interface.js";
import { BridgeIDBKeyRange } from "./bridge-idb.js";
import * as fs from "node:fs";
import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
// Exercises the SQLite backend through the raw backend interface:
// schema creation inside a version-change transaction, a record store,
// and both object-store and index queries against the same transaction.
test("sqlite3 backend", async (t) => {
  const filename = "mytestdb.sqlite3";
  // Start from a clean slate; the file may be left over from a prior run.
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    // Do nothing.
  }
  try {
    const sqlite3Impl = await createNodeSqlite3Impl();
    const backend = await createSqliteBackend(sqlite3Impl, {
      filename,
    });
    const dbConnRes = await backend.connectDatabase("mydb");
    const dbConn = dbConnRes.conn;
    // Schema changes (object stores, indexes) happen inside a
    // version-change transaction for version 1.
    const tx = await backend.enterVersionChange(dbConn, 1);
    backend.createObjectStore(tx, "books", "isbn", true);
    backend.createIndex(tx, "byName", "books", "name", false, false);
    // Store with key undefined; the assertions below expect the backend
    // to generate key 1 (presumably via autoIncrement — the last
    // createObjectStore argument).
    await backend.storeRecord(tx, {
      objectStoreName: "books",
      storeLevel: StoreLevel.AllowOverwrite,
      value: { name: "foo" },
      key: undefined,
    });
    const res = await backend.getObjectStoreRecords(tx, {
      direction: "next",
      limit: 1,
      objectStoreName: "books",
      resultLevel: ResultLevel.Full,
      range: BridgeIDBKeyRange.only(1),
    });
    t.deepEqual(res.count, 1);
    t.deepEqual(res.primaryKeys![0], 1);
    t.deepEqual(res.values![0].name, "foo");
    // The same record must be visible through the "byName" index,
    // queried by its index key "foo".
    const indexRes = await backend.getIndexRecords(tx, {
      direction: "next",
      limit: 1,
      objectStoreName: "books",
      indexName: "byName",
      resultLevel: ResultLevel.Full,
      range: BridgeIDBKeyRange.only("foo"),
    });
    t.deepEqual(indexRes.count, 1);
    t.deepEqual(indexRes.values![0].isbn, 1);
    t.deepEqual(indexRes.values![0].name, "foo");
    await backend.commit(tx);
    // A plain readwrite transaction after the version change must also
    // open and commit cleanly.
    const tx2 = await backend.beginTransaction(dbConn, ["books"], "readwrite");
    await backend.commit(tx2);
    await backend.close(dbConn);
    t.pass();
  } catch (e: any) {
    // Log before rethrowing so the failure cause shows up in test output.
    console.log(e);
    throw e;
  }
});

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,29 @@
import { openPromise } from "./util/openPromise.js";
/**
 * Condition variable for async code.
 *
 * Callers await {@link wait}; every pending waiter is released the next
 * time {@link trigger} is called, after which the condition is re-armed
 * with a fresh promise for future waiters.
 */
export class AsyncCondition {
  _waitPromise: Promise<void>;
  _resolveWaitPromise: () => void;

  constructor() {
    const { promise, resolve } = openPromise<void>();
    this._waitPromise = promise;
    this._resolveWaitPromise = resolve;
  }

  /** Promise that resolves on the next trigger(). */
  wait(): Promise<void> {
    return this._waitPromise;
  }

  /** Release all current waiters and re-arm for the next round. */
  trigger(): void {
    // Resolve the promise every current waiter is holding...
    this._resolveWaitPromise();
    // ...then install a fresh promise so later wait() calls block
    // until the next trigger().
    const { promise, resolve } = openPromise<void>();
    this._waitPromise = promise;
    this._resolveWaitPromise = resolve;
  }
}
/**
 * Degree of database access currently held.
 * Higher numeric values subsume the capabilities of lower ones.
 */
export enum TransactionLevel {
  None = 0,
  Read = 1,
  Write = 2,
  VersionChange = 3,
}

View File

@ -21,66 +21,45 @@ import {
IDBValidKey, IDBValidKey,
} from "./idbtypes.js"; } from "./idbtypes.js";
/** @public */ export interface ConnectResult {
export interface ObjectStoreProperties { conn: DatabaseConnection;
keyPath: string[] | null; version: number;
autoIncrement: boolean; objectStores: string[];
indexes: { [nameame: string]: IndexProperties };
} }
/** @public */
export interface IndexProperties {
keyPath: string[];
multiEntry: boolean;
unique: boolean;
}
/** @public */
export interface Schema {
databaseName: string;
databaseVersion: number;
objectStores: { [name: string]: ObjectStoreProperties };
}
/** @public */
export interface DatabaseConnection { export interface DatabaseConnection {
connectionCookie: string; connectionCookie: string;
} }
/** @public */
export interface DatabaseTransaction { export interface DatabaseTransaction {
transactionCookie: string; transactionCookie: string;
} }
/** @public */
export enum ResultLevel { export enum ResultLevel {
OnlyCount, OnlyCount,
OnlyKeys, OnlyKeys,
Full, Full,
} }
/** @public */
export enum StoreLevel { export enum StoreLevel {
NoOverwrite, NoOverwrite,
AllowOverwrite, AllowOverwrite,
UpdateExisting, UpdateExisting,
} }
/** @public */
export interface RecordGetRequest { export interface IndexGetQuery {
direction: IDBCursorDirection; direction: IDBCursorDirection;
objectStoreName: string; objectStoreName: string;
indexName: string | undefined; indexName: string;
/** /**
* The range of keys to return. * The range of keys to return.
* If indexName is defined, the range refers to the index keys. * The range refers to the index keys.
* Otherwise it refers to the object store keys.
*/ */
range: BridgeIDBKeyRange | undefined | null; range: BridgeIDBKeyRange | undefined | null;
/** /**
* Last cursor position in terms of the index key. * Last cursor position in terms of the index key.
* Can only be specified if indexName is defined and * Can only be specified if lastObjectStorePosition is defined.
* lastObjectStorePosition is defined.
* *
* Must either be undefined or within range. * Must either be undefined or within range.
*/ */
@ -92,8 +71,6 @@ export interface RecordGetRequest {
/** /**
* If specified, the index key of the results must be * If specified, the index key of the results must be
* greater or equal to advanceIndexKey. * greater or equal to advanceIndexKey.
*
* Only applicable if indexName is specified.
*/ */
advanceIndexKey?: IDBValidKey; advanceIndexKey?: IDBValidKey;
/** /**
@ -109,7 +86,31 @@ export interface RecordGetRequest {
resultLevel: ResultLevel; resultLevel: ResultLevel;
} }
/** @public */ export interface ObjectStoreGetQuery {
direction: IDBCursorDirection;
objectStoreName: string;
/**
* The range of keys to return.
* Refers to the object store keys.
*/
range: BridgeIDBKeyRange | undefined | null;
/**
* Last position in terms of the object store key.
*/
lastObjectStorePosition?: IDBValidKey;
/**
* If specified, the primary key of the results must be greater
* or equal to advancePrimaryKey.
*/
advancePrimaryKey?: IDBValidKey;
/**
* Maximum number of results to return.
* If 0, return all available results
*/
limit: number;
resultLevel: ResultLevel;
}
export interface RecordGetResponse { export interface RecordGetResponse {
values: any[] | undefined; values: any[] | undefined;
indexKeys: IDBValidKey[] | undefined; indexKeys: IDBValidKey[] | undefined;
@ -117,7 +118,6 @@ export interface RecordGetResponse {
count: number; count: number;
} }
/** @public */
export interface RecordStoreRequest { export interface RecordStoreRequest {
objectStoreName: string; objectStoreName: string;
value: any; value: any;
@ -125,7 +125,6 @@ export interface RecordStoreRequest {
storeLevel: StoreLevel; storeLevel: StoreLevel;
} }
/** @public */
export interface RecordStoreResponse { export interface RecordStoreResponse {
/** /**
* Key that the record was stored under in the object store. * Key that the record was stored under in the object store.
@ -133,38 +132,79 @@ export interface RecordStoreResponse {
key: IDBValidKey; key: IDBValidKey;
} }
/** @public */ export interface ObjectStoreMeta {
indexSet: string[];
keyPath: string | string[] | null;
autoIncrement: boolean;
}
export interface IndexMeta {
keyPath: string | string[];
multiEntry: boolean;
unique: boolean;
}
// FIXME: Instead of refering to an object store by name,
// maybe refer to it via some internal, numeric ID?
// This would simplify renaming.
export interface Backend { export interface Backend {
getDatabases(): Promise<BridgeIDBDatabaseInfo[]>; getDatabases(): Promise<BridgeIDBDatabaseInfo[]>;
connectDatabase(name: string): Promise<DatabaseConnection>; connectDatabase(name: string): Promise<ConnectResult>;
beginTransaction( beginTransaction(
conn: DatabaseConnection, dbConn: DatabaseConnection,
objectStores: string[], objectStores: string[],
mode: IDBTransactionMode, mode: IDBTransactionMode,
): Promise<DatabaseTransaction>; ): Promise<DatabaseTransaction>;
enterVersionChange( enterVersionChange(
conn: DatabaseConnection, dbConn: DatabaseConnection,
newVersion: number, newVersion: number,
): Promise<DatabaseTransaction>; ): Promise<DatabaseTransaction>;
deleteDatabase(name: string): Promise<void>; deleteDatabase(name: string): Promise<void>;
close(db: DatabaseConnection): Promise<void>; close(dbConn: DatabaseConnection): Promise<void>;
getSchema(db: DatabaseConnection): Schema; // FIXME: Use this for connection
// prepareConnect() - acquires a lock, maybe enters a version change transaction?
// finishConnect() - after possible versionchange is done, allow others to connect
getCurrentTransactionSchema(btx: DatabaseTransaction): Schema; /**
* Get metadata for an object store.
*
* When dbConn is running a version change transaction,
* the current schema (and not the initial schema) is returned.
*
* Caller may mutate the result, a new object
* is returned on each call.
*/
getObjectStoreMeta(
dbConn: DatabaseConnection,
objectStoreName: string,
): ObjectStoreMeta | undefined;
getInitialTransactionSchema(btx: DatabaseTransaction): Schema; /**
* Get metadata for an index.
*
* When dbConn is running a version change transaction,
* the current schema (and not the initial schema) is returned.
*
* Caller may mutate the result, a new object
* is returned on each call.
*/
getIndexMeta(
dbConn: DatabaseConnection,
objectStoreName: string,
indexName: string,
): IndexMeta | undefined;
renameIndex( renameIndex(
btx: DatabaseTransaction, btx: DatabaseTransaction,
objectStoreName: string, objectStoreName: string,
oldName: string, oldIndexName: string,
newName: string, newIndexName: string,
): void; ): void;
deleteIndex( deleteIndex(
@ -173,8 +213,9 @@ export interface Backend {
indexName: string, indexName: string,
): void; ): void;
rollback(btx: DatabaseTransaction): Promise<void>; rollback(btx: DatabaseTransaction): void;
// FIXME: Should probably not be async
commit(btx: DatabaseTransaction): Promise<void>; commit(btx: DatabaseTransaction): Promise<void>;
deleteObjectStore(btx: DatabaseTransaction, name: string): void; deleteObjectStore(btx: DatabaseTransaction, name: string): void;
@ -207,9 +248,14 @@ export interface Backend {
range: BridgeIDBKeyRange, range: BridgeIDBKeyRange,
): Promise<void>; ): Promise<void>;
getRecords( getObjectStoreRecords(
btx: DatabaseTransaction, btx: DatabaseTransaction,
req: RecordGetRequest, req: ObjectStoreGetQuery,
): Promise<RecordGetResponse>;
getIndexRecords(
btx: DatabaseTransaction,
req: IndexGetQuery,
): Promise<RecordGetResponse>; ): Promise<RecordGetResponse>;
storeRecord( storeRecord(

View File

@ -0,0 +1,740 @@
/*
Copyright 2019 Florian Dold
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
/**
* Tests that are backend-generic.
* See testingdb.ts for the backend selection in test runs.
*/
/**
* Imports.
*/
import test from "ava";
import {
BridgeIDBCursorWithValue,
BridgeIDBDatabase,
BridgeIDBFactory,
BridgeIDBKeyRange,
BridgeIDBTransaction,
} from "./bridge-idb.js";
import {
IDBCursorDirection,
IDBCursorWithValue,
IDBDatabase,
IDBKeyRange,
IDBRequest,
IDBValidKey,
} from "./idbtypes.js";
import { initTestIndexedDB, useTestIndexedDb } from "./testingdb.js";
import { MemoryBackend } from "./MemoryBackend.js";
import { promiseFromRequest, promiseFromTransaction } from "./idbpromutil.js";
test.before("test DB initialization", initTestIndexedDB);
// Part 1 of the IndexedDB spec's library example: create the schema and
// seed initial records inside the upgradeneeded callback.
test("Spec: Example 1 Part 1", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = `library-${Date.now()}${Math.random()}`;
  const openReq = idb.open(dbname);
  openReq.onupgradeneeded = () => {
    const db = openReq.result as BridgeIDBDatabase;
    const books = db.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
    // Populate with initial data.
    books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
    books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
    books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  };
  await promiseFromRequest(openReq);
  t.pass();
});
// Part 2: after the upgrade, write records in an ordinary readwrite
// transaction and wait for it to complete.
test("Spec: Example 1 Part 2", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = `library-${Date.now()}${Math.random()}`;
  const openReq = idb.open(dbname);
  openReq.onupgradeneeded = () => {
    const db = openReq.result;
    const books = db.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  t.is(db.name, dbname);
  const tx = db.transaction("books", "readwrite");
  tx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const books = tx.objectStore("books");
  books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(tx);
  t.pass();
});
// Putting a record twice with the same primary key but a changed index
// key must replace the old index entry instead of leaving a duplicate.
test("duplicate index insertion", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = "library-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, dbname);
  const tx = db.transaction("books", "readwrite");
  tx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const store = tx.objectStore("books");
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  // Change the index key, keep primary key (isbn) the same.
  store.put({ title: "Water Buffaloes", author: "Bla", isbn: 234567 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(tx);
  // Walk the whole by_author index: the stale "Bla" entry must be gone,
  // leaving exactly one index entry per stored record.
  const tx3 = db.transaction(["books"], "readonly");
  const store3 = tx3.objectStore("books");
  const index3 = store3.index("by_author");
  const request3 = index3.openCursor();
  const authorList: string[] = [];
  await promiseFromRequest(request3);
  while (request3.result != null) {
    const cursor: IDBCursorWithValue = request3.result;
    authorList.push(cursor.value.author);
    cursor.continue();
    await promiseFromRequest(request3);
  }
  t.deepEqual(authorList, ["Barney", "Fred", "Fred"]);
  t.pass();
});
// Iterate an index cursor restricted to a single key ("Fred") and check
// the records come back in primary-key order within that index key.
test("simple index iteration", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = "library-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  const tx = db.transaction("books", "readwrite");
  const store = tx.objectStore("books");
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  await promiseFromTransaction(tx);
  const tx3 = db.transaction(["books"], "readonly");
  const store3 = tx3.objectStore("books");
  const index3 = store3.index("by_author");
  const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
  await promiseFromRequest(request3);
  let cursor: BridgeIDBCursorWithValue | null;
  cursor = request3.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  // NOTE(review): the assertions below reuse the same `cursor` variable
  // after continue(), so this test relies on the bridge updating the
  // cursor object in place rather than creating a new one.
  await promiseFromRequest(request3);
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 234567);
  cursor.continue();
  await promiseFromRequest(request3);
  // After iteration is exhausted, the (in-place-updated) cursor has no value.
  t.is(cursor.value, undefined);
});
// Part 3: lookups via a unique index, plus cursor iteration over the
// object store and the by_author index in all four directions/uniqueness
// modes ("next", "nextunique", "prevunique").
test("Spec: Example 1 Part 3", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = "library-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, dbname);
  const tx = db.transaction("books", "readwrite");
  const store = tx.objectStore("books");
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  await promiseFromTransaction(tx);
  // Point lookup through the unique by_title index.
  const tx2 = db.transaction("books", "readonly");
  const store2 = tx2.objectStore("books");
  var index2 = store2.index("by_title");
  const request2 = index2.get("Bedrock Nights");
  const result2: any = await promiseFromRequest(request2);
  t.is(result2.author, "Barney");
  // Cursor over by_author restricted to "Fred": two records, in
  // ascending primary-key order.
  const tx3 = db.transaction(["books"], "readonly");
  const store3 = tx3.objectStore("books");
  const index3 = store3.index("by_author");
  const request3 = index3.openCursor(BridgeIDBKeyRange.only("Fred"));
  await promiseFromRequest(request3);
  let cursor: BridgeIDBCursorWithValue | null;
  cursor = request3.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request3);
  cursor = request3.result as BridgeIDBCursorWithValue;
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 234567);
  await promiseFromTransaction(tx3);
  // Cursor over the object store itself: all three records in
  // ascending isbn order, then null when exhausted.
  const tx4 = db.transaction("books", "readonly");
  const store4 = tx4.objectStore("books");
  const request4 = store4.openCursor();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 234567);
  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.isbn, 345678);
  cursor.continue();
  await promiseFromRequest(request4);
  cursor = request4.result;
  t.is(cursor, null);
  // Full forward iteration of by_author: Barney, Fred, Fred.
  const tx5 = db.transaction("books", "readonly");
  const store5 = tx5.objectStore("books");
  const index5 = store5.index("by_author");
  const request5 = index5.openCursor(null, "next");
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");
  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  cursor.continue();
  await promiseFromRequest(request5);
  cursor = request5.result;
  t.is(cursor, null);
  // "nextunique": one entry per distinct index key; for duplicate
  // authors, the record with the lowest primary key (isbn 123456).
  const request6 = index5.openCursor(null, "nextunique");
  await promiseFromRequest(request6);
  cursor = request6.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");
  cursor.continue();
  await promiseFromRequest(request6);
  cursor = request6.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request6);
  cursor = request6.result;
  t.is(cursor, null);
  console.log("---------------------------");
  // "prevunique": distinct index keys in reverse order, but still the
  // lowest-primary-key record for each key.
  const request7 = index5.openCursor(null, "prevunique");
  await promiseFromRequest(request7);
  cursor = request7.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Fred");
  t.is(cursor.value.isbn, 123456);
  cursor.continue();
  await promiseFromRequest(request7);
  cursor = request7.result;
  if (!cursor) {
    throw new Error();
  }
  t.is(cursor.value.author, "Barney");
  cursor.continue();
  await promiseFromRequest(request7);
  cursor = request7.result;
  t.is(cursor, null);
  db.close();
  t.pass();
});
// Deleting one record must not affect the other records in the store.
test("simple deletion", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = `library-${Date.now()}${Math.random()}`;
  const openReq = idb.open(dbname);
  openReq.onupgradeneeded = () => {
    const db = openReq.result;
    const books = db.createObjectStore("books", { keyPath: "isbn" });
    books.createIndex("by_title", "title", { unique: true });
    books.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  const writeTx = db.transaction("books", "readwrite");
  writeTx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const books = writeTx.objectStore("books");
  books.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  books.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  books.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(writeTx);
  const delTx = db.transaction("books", "readwrite");
  const delStore = delTx.objectStore("books");
  // Sanity check: the record we intend to keep is present.
  const before = delStore.get(234567);
  await promiseFromRequest(before);
  t.is(before.readyState, "done");
  t.is(before.result.author, "Fred");
  delStore.delete(123456);
  // The deleted key must now resolve to undefined...
  const afterDelete = delStore.get(123456);
  await promiseFromRequest(afterDelete);
  t.is(afterDelete.readyState, "done");
  t.is(afterDelete.result, undefined);
  // ...while the untouched record is still readable.
  const survivor = delStore.get(234567);
  await promiseFromRequest(survivor);
  t.is(survivor.readyState, "done");
  t.is(survivor.result.author, "Fred");
  await promiseFromTransaction(delTx);
  t.pass();
});
// MemoryBackend-specific: a dump exported from one backend instance can
// be imported into another, round-trips exactly, and preserves the
// record count and database version.
test("export", async (t) => {
  const backend = new MemoryBackend();
  const idb = new BridgeIDBFactory(backend);
  const dbname = "library-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname, 42);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("books", { keyPath: "isbn" });
    const titleIndex = store.createIndex("by_title", "title", { unique: true });
    const authorIndex = store.createIndex("by_author", "author");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  const tx = db.transaction("books", "readwrite");
  tx.oncomplete = () => {
    console.log("oncomplete called");
  };
  const store = tx.objectStore("books");
  store.put({ title: "Quarry Memories", author: "Fred", isbn: 123456 });
  store.put({ title: "Water Buffaloes", author: "Fred", isbn: 234567 });
  store.put({ title: "Bedrock Nights", author: "Barney", isbn: 345678 });
  await promiseFromTransaction(tx);
  // Export from the first backend, import into a fresh one, export
  // again; both dumps must be deeply equal.
  const exportedData = backend.exportDump();
  const backend2 = new MemoryBackend();
  backend2.importDump(exportedData);
  const exportedData2 = backend2.exportDump();
  t.assert(
    exportedData.databases[dbname].objectStores["books"].records.length ===
      3,
  );
  t.deepEqual(exportedData, exportedData2);
  // The version requested in idb.open() must survive the round trip.
  t.is(exportedData.databases[dbname].schema.databaseVersion, 42);
  t.is(exportedData2.databases[dbname].schema.databaseVersion, 42);
  t.pass();
});
// Re-putting a record without a previously indexed property must remove
// the record's entry from that index.
test("update with non-existent index values", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = "mydb-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("bla", { keyPath: "x" });
    store.createIndex("by_y", "y");
    store.createIndex("by_z", "z");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  t.is(db.name, dbname);
  {
    // Insert a record with z present; it must be reachable via by_z.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a", z: 42 });
    const index = store.index("by_z");
    const indRes = await promiseFromRequest(index.get(42));
    t.is(indRes.x, 0);
    const res = await promiseFromRequest(store.get(0));
    t.is(res.z, 42);
    await promiseFromTransaction(tx);
  }
  {
    // Overwrite the same primary key without z.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a" });
    const res = await promiseFromRequest(store.get(0));
    t.is(res.z, undefined);
    await promiseFromTransaction(tx);
  }
  {
    // The stale by_z entry for 42 must be gone.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const index = store.index("by_z");
    {
      const indRes = await promiseFromRequest(index.get(42));
      t.is(indRes, undefined);
    }
    const res = await promiseFromRequest(store.get(0));
    t.is(res.z, undefined);
    await promiseFromTransaction(tx);
  }
  t.pass();
});
// Overwriting a record that participates in a unique compound index must
// not conflict with the record's own previous index entry.
test("delete from unique index", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = `mydb-${Date.now()}${Math.random()}`;
  const openReq = idb.open(dbname);
  openReq.onupgradeneeded = () => {
    const upgradeDb = openReq.result as IDBDatabase;
    const store = upgradeDb.createObjectStore("bla", { keyPath: "x" });
    store.createIndex("by_yz", ["y", "z"], { unique: true });
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(openReq);
  t.is(db.name, dbname);
  {
    // Insert, then read back via the compound index and the primary key.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a", z: 42 });
    const byYz = store.index("by_yz");
    const viaIndex = await promiseFromRequest(byYz.get(["a", 42]));
    t.is(viaIndex.x, 0);
    const viaKey = await promiseFromRequest(store.get(0));
    t.is(viaKey.z, 42);
    await promiseFromTransaction(tx);
  }
  {
    // Overwrite with the same (y, z) pair: the old unique entry must be
    // replaced rather than reported as a constraint violation.
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    store.put({ x: 0, y: "a", z: 42, extra: 123 });
    await promiseFromTransaction(tx);
  }
  t.pass();
});
// Cursor iteration with key ranges and directions, over both the object
// store (primary key x) and the by_y index, including the unique
// directions.
test("range queries", async (t) => {
  const idb = useTestIndexedDb();
  const dbname = "mydb-" + new Date().getTime() + Math.random();
  const request = idb.open(dbname);
  request.onupgradeneeded = () => {
    const db = request.result;
    const store = db.createObjectStore("bla", { keyPath: "x" });
    store.createIndex("by_y", "y");
    store.createIndex("by_z", "z");
  };
  const db: BridgeIDBDatabase = await promiseFromRequest(request);
  const tx = db.transaction("bla", "readwrite");
  const store = tx.objectStore("bla");
  store.put({ x: 0, y: "a" });
  store.put({ x: 2, y: "a" });
  store.put({ x: 4, y: "b" });
  store.put({ x: 8, y: "b" });
  store.put({ x: 10, y: "c" });
  store.put({ x: 12, y: "c" });
  await promiseFromTransaction(tx);
  // Collect all values reached by an object-store cursor with the given
  // range/direction and compare against the expected list.
  async function doCursorStoreQuery(
    range: IDBKeyRange | IDBValidKey | undefined,
    direction: IDBCursorDirection | undefined,
    expected: any[],
  ): Promise<void> {
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const vals: any[] = [];
    const req = store.openCursor(range, direction);
    while (1) {
      await promiseFromRequest(req);
      const cursor: IDBCursorWithValue = req.result;
      if (!cursor) {
        break;
      }
      cursor.continue();
      vals.push(cursor.value);
    }
    await promiseFromTransaction(tx);
    t.deepEqual(vals, expected);
  }
  // Same, but iterating through the by_y index.
  async function doCursorIndexQuery(
    range: IDBKeyRange | IDBValidKey | undefined,
    direction: IDBCursorDirection | undefined,
    expected: any[],
  ): Promise<void> {
    const tx = db.transaction("bla", "readwrite");
    const store = tx.objectStore("bla");
    const index = store.index("by_y");
    const vals: any[] = [];
    const req = index.openCursor(range, direction);
    while (1) {
      await promiseFromRequest(req);
      const cursor: IDBCursorWithValue = req.result;
      if (!cursor) {
        break;
      }
      cursor.continue();
      vals.push(cursor.value);
    }
    await promiseFromTransaction(tx);
    t.deepEqual(vals, expected);
  }
  // No range: every record in primary-key order.
  await doCursorStoreQuery(undefined, undefined, [
    {
      x: 0,
      y: "a",
    },
    {
      x: 2,
      y: "a",
    },
    {
      x: 4,
      y: "b",
    },
    {
      x: 8,
      y: "b",
    },
    {
      x: 10,
      y: "c",
    },
    {
      x: 12,
      y: "c",
    },
  ]);
  // Open bounds on both ends: endpoints 0 and 12 are excluded.
  await doCursorStoreQuery(
    BridgeIDBKeyRange.bound(0, 12, true, true),
    undefined,
    [
      {
        x: 2,
        y: "a",
      },
      {
        x: 4,
        y: "b",
      },
      {
        x: 8,
        y: "b",
      },
      {
        x: 10,
        y: "c",
      },
    ],
  );
  // Open index-key bounds ("a", "c"): only the "b" records remain.
  await doCursorIndexQuery(
    BridgeIDBKeyRange.bound("a", "c", true, true),
    undefined,
    [
      {
        x: 4,
        y: "b",
      },
      {
        x: 8,
        y: "b",
      },
    ],
  );
  // "nextunique": first record for each distinct index key, ascending.
  await doCursorIndexQuery(undefined, "nextunique", [
    {
      x: 0,
      y: "a",
    },
    {
      x: 4,
      y: "b",
    },
    {
      x: 10,
      y: "c",
    },
  ]);
  // "prevunique": distinct keys in descending order, but still the
  // lowest-primary-key record per key.
  await doCursorIndexQuery(undefined, "prevunique", [
    {
      x: 10,
      y: "c",
    },
    {
      x: 4,
      y: "b",
    },
    {
      x: 0,
      y: "a",
    },
  ]);
  db.close();
  t.pass();
});

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test abort-in-initial-upgradeneeded.htm", async (t) => { test("WPT test abort-in-initial-upgradeneeded.htm", async (t) => {
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// When db.close is called in upgradeneeded, the db is cleaned up on refresh // When db.close is called in upgradeneeded, the db is cleaned up on refresh
test("WPT test close-in-upgradeneeded.htm", (t) => { test("WPT test close-in-upgradeneeded.htm", (t) => {

View File

@ -1,7 +1,9 @@
import test from "ava"; import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js"; import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { IDBRequest } from "../idbtypes.js"; import { IDBRequest } from "../idbtypes.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
const IDBKeyRange = BridgeIDBKeyRange; const IDBKeyRange = BridgeIDBKeyRange;

View File

@ -2,10 +2,13 @@ import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js"; import { BridgeIDBRequest } from "../bridge-idb.js";
import { import {
indexeddb_test, indexeddb_test,
initTestIndexedDB,
is_transaction_active, is_transaction_active,
keep_alive, keep_alive,
} from "./wptsupport.js"; } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test abort-in-initial-upgradeneeded.htm (subtest 1)", async (t) => { test("WPT test abort-in-initial-upgradeneeded.htm (subtest 1)", async (t) => {
// Transactions are active during success handlers // Transactions are active during success handlers
await indexeddb_test( await indexeddb_test(

View File

@ -1,6 +1,8 @@
import test from "ava"; import test from "ava";
import { BridgeIDBCursor,BridgeIDBRequest } from "../bridge-idb.js"; import { BridgeIDBCursor,BridgeIDBRequest } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test idbcursor_advance_index.htm", async (t) => { test("WPT test idbcursor_advance_index.htm", async (t) => {
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {

View File

@ -1,6 +1,9 @@
import test from "ava"; import test from "ava";
import { BridgeIDBCursor, BridgeIDBCursorWithValue } from "../bridge-idb.js"; import { BridgeIDBCursor, BridgeIDBCursorWithValue } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
import { IDBDatabase } from "../idbtypes.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test idbcursor_continue_index.htm", (t) => { test("WPT test idbcursor_continue_index.htm", (t) => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@ -209,7 +212,7 @@ test("WPT idbcursor-continue-index4.htm", (t) => {
// IDBCursor.continue() - index - iterate using 'prevunique' // IDBCursor.continue() - index - iterate using 'prevunique'
test("WPT idbcursor-continue-index5.htm", (t) => { test("WPT idbcursor-continue-index5.htm", (t) => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
var db: any; var db: IDBDatabase;
const records = [ const records = [
{ pKey: "primaryKey_0", iKey: "indexKey_0" }, { pKey: "primaryKey_0", iKey: "indexKey_0" },
{ pKey: "primaryKey_1", iKey: "indexKey_1" }, { pKey: "primaryKey_1", iKey: "indexKey_1" },

View File

@ -1,7 +1,9 @@
import test from "ava"; import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js"; import { BridgeIDBCursor } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js"; import { IDBDatabase } from "../idbtypes.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.continue() - object store - iterate to the next record // IDBCursor.continue() - object store - iterate to the next record
test("WPT test idbcursor_continue_objectstore.htm", (t) => { test("WPT test idbcursor_continue_objectstore.htm", (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { indexeddb_test } from "./wptsupport.js"; import { indexeddb_test, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT idbcursor-delete-exception-order.htm", async (t) => { test("WPT idbcursor-delete-exception-order.htm", async (t) => {
// 'IDBCursor.delete exception order: TransactionInactiveError vs. ReadOnlyError' // 'IDBCursor.delete exception order: TransactionInactiveError vs. ReadOnlyError'

View File

@ -1,7 +1,9 @@
import test from "ava"; import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js"; import { BridgeIDBCursor } from "../bridge-idb.js";
import { IDBCursor } from "../idbtypes.js"; import { IDBCursor } from "../idbtypes.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.delete() - index - remove a record from the object store // IDBCursor.delete() - index - remove a record from the object store
test("WPT idbcursor-delete-index.htm", (t) => { test("WPT idbcursor-delete-index.htm", (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava"; import test from "ava";
import { BridgeIDBCursor } from "../bridge-idb.js"; import { BridgeIDBCursor } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.delete() - object store - remove a record from the object store // IDBCursor.delete() - object store - remove a record from the object store
test("WPT idbcursor-delete-objectstore.htm", (t) => { test("WPT idbcursor-delete-objectstore.htm", (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT idbcursor-reused.htm", async (t) => { test("WPT idbcursor-reused.htm", async (t) => {
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {

View File

@ -3,10 +3,13 @@ import { BridgeIDBCursor, BridgeIDBKeyRange } from "../bridge-idb.js";
import { import {
createDatabase, createDatabase,
createdb, createdb,
initTestIndexedDB,
promiseForRequest, promiseForRequest,
promiseForTransaction, promiseForTransaction,
} from "./wptsupport.js"; } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBCursor.update() - index - modify a record in the object store // IDBCursor.update() - index - modify a record in the object store
test("WPT test idbcursor_update_index.htm", (t) => { test("WPT test idbcursor_update_index.htm", (t) => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {

View File

@ -1,8 +1,10 @@
import test from "ava"; import test from "ava";
import { idbFactory } from "./wptsupport.js"; import { initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT idbfactory-cmp*.html", async (t) => { test("WPT idbfactory-cmp*.html", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
var greater = indexedDB.cmp(2, 1); var greater = indexedDB.cmp(2, 1);
var equal = indexedDB.cmp(2, 2); var equal = indexedDB.cmp(2, 2);
var less = indexedDB.cmp(1, 2); var less = indexedDB.cmp(1, 2);

View File

@ -1,7 +1,10 @@
import test from "ava"; import test from "ava";
import { BridgeIDBVersionChangeEvent } from "../bridge-idb.js"; import { BridgeIDBVersionChangeEvent } from "../bridge-idb.js";
import FakeEvent from "../util/FakeEvent.js"; import FakeEvent from "../util/FakeEvent.js";
import { createdb, format_value, idbFactory } from "./wptsupport.js"; import { createdb, format_value, initTestIndexedDB, useTestIndexedDb } from "./wptsupport.js";
import { IDBDatabase } from "../idbtypes.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBFactory.open() - request has no source // IDBFactory.open() - request has no source
test("WPT idbfactory-open.htm", async (t) => { test("WPT idbfactory-open.htm", async (t) => {
@ -36,7 +39,7 @@ test("WPT idbfactory-open2.htm", async (t) => {
// IDBFactory.open() - no version opens current database // IDBFactory.open() - no version opens current database
test("WPT idbfactory-open3.htm", async (t) => { test("WPT idbfactory-open3.htm", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13); var open_rq = createdb(t, undefined, 13);
var did_upgrade = false; var did_upgrade = false;
@ -61,7 +64,6 @@ test("WPT idbfactory-open3.htm", async (t) => {
// IDBFactory.open() - new database has default version // IDBFactory.open() - new database has default version
test("WPT idbfactory-open4.htm", async (t) => { test("WPT idbfactory-open4.htm", async (t) => {
const indexedDB = idbFactory;
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, t.title + "-database_name"); var open_rq = createdb(t, t.title + "-database_name");
@ -78,7 +80,6 @@ test("WPT idbfactory-open4.htm", async (t) => {
// IDBFactory.open() - new database is empty // IDBFactory.open() - new database is empty
test("WPT idbfactory-open5.htm", async (t) => { test("WPT idbfactory-open5.htm", async (t) => {
const indexedDB = idbFactory;
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, t.title + "-database_name"); var open_rq = createdb(t, t.title + "-database_name");
@ -97,7 +98,7 @@ test("WPT idbfactory-open5.htm", async (t) => {
// IDBFactory.open() - open database with a lower version than current // IDBFactory.open() - open database with a lower version than current
test("WPT idbfactory-open6.htm", async (t) => { test("WPT idbfactory-open6.htm", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13); var open_rq = createdb(t, undefined, 13);
var open_rq2: any; var open_rq2: any;
@ -131,7 +132,7 @@ test("WPT idbfactory-open6.htm", async (t) => {
// IDBFactory.open() - open database with a higher version than current // IDBFactory.open() - open database with a higher version than current
test("WPT idbfactory-open7.htm", async (t) => { test("WPT idbfactory-open7.htm", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13); var open_rq = createdb(t, undefined, 13);
var did_upgrade = false; var did_upgrade = false;
@ -169,7 +170,7 @@ test("WPT idbfactory-open7.htm", async (t) => {
// IDBFactory.open() - error in version change transaction aborts open // IDBFactory.open() - error in version change transaction aborts open
test("WPT idbfactory-open8.htm", async (t) => { test("WPT idbfactory-open8.htm", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var open_rq = createdb(t, undefined, 13); var open_rq = createdb(t, undefined, 13);
var did_upgrade = false; var did_upgrade = false;
@ -193,7 +194,7 @@ test("WPT idbfactory-open8.htm", async (t) => {
// IDBFactory.open() - errors in version argument // IDBFactory.open() - errors in version argument
test("WPT idbfactory-open9.htm", async (t) => { test("WPT idbfactory-open9.htm", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
function should_throw(val: any, name?: string) { function should_throw(val: any, name?: string) {
if (!name) { if (!name) {
name = typeof val == "object" && val ? "object" : format_value(val); name = typeof val == "object" && val ? "object" : format_value(val);
@ -281,9 +282,9 @@ test("WPT idbfactory-open9.htm", async (t) => {
// IDBFactory.open() - error in version change transaction aborts open // IDBFactory.open() - error in version change transaction aborts open
test("WPT idbfactory-open10.htm", async (t) => { test("WPT idbfactory-open10.htm", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var db: any, db2: any; var db: IDBDatabase, db2: IDBDatabase;
var open_rq = createdb(t, undefined, 9); var open_rq = createdb(t, undefined, 9);
open_rq.onupgradeneeded = function (e: any) { open_rq.onupgradeneeded = function (e: any) {
@ -350,7 +351,7 @@ test("WPT idbfactory-open10.htm", async (t) => {
var open_rq3 = indexedDB.open(db.name); var open_rq3 = indexedDB.open(db.name);
open_rq3.onsuccess = function (e: any) { open_rq3.onsuccess = function (e: any) {
var db3 = e.target.result; var db3: IDBDatabase = e.target.result;
t.true( t.true(
db3.objectStoreNames.contains("store"), db3.objectStoreNames.contains("store"),
@ -407,7 +408,7 @@ test("WPT idbfactory-open10.htm", async (t) => {
// IDBFactory.open() - second open's transaction is available to get objectStores // IDBFactory.open() - second open's transaction is available to get objectStores
test("WPT idbfactory-open11.htm", async (t) => { test("WPT idbfactory-open11.htm", async (t) => {
const indexedDB = idbFactory; const indexedDB = useTestIndexedDb();
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var db: any; var db: any;
var count_done = 0; var count_done = 0;
@ -472,8 +473,6 @@ test("WPT idbfactory-open11.htm", async (t) => {
// IDBFactory.open() - upgradeneeded gets VersionChangeEvent // IDBFactory.open() - upgradeneeded gets VersionChangeEvent
test("WPT idbfactory-open12.htm", async (t) => { test("WPT idbfactory-open12.htm", async (t) => {
const indexedDB = idbFactory;
var db: any; var db: any;
var open_rq = createdb(t, undefined, 9); var open_rq = createdb(t, undefined, 9);

View File

@ -1,7 +1,9 @@
import test from "ava"; import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js"; import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js"; import { IDBDatabase } from "../idbtypes.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBIndex.get() - returns the record // IDBIndex.get() - returns the record
test("WPT idbindex_get.htm", async (t) => { test("WPT idbindex_get.htm", async (t) => {
@ -93,7 +95,7 @@ test("WPT idbindex_get3.htm", async (t) => {
// IDBIndex.get() - returns the record with the first key in the range // IDBIndex.get() - returns the record with the first key in the range
test("WPT idbindex_get4.htm", async (t) => { test("WPT idbindex_get4.htm", async (t) => {
await new Promise<void>((resolve, reject) => { await new Promise<void>((resolve, reject) => {
var db: any; var db: IDBDatabase;
var open_rq = createdb(t); var open_rq = createdb(t);

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBIndex.openCursor() - throw InvalidStateError when the index is deleted // IDBIndex.openCursor() - throw InvalidStateError when the index is deleted
test("WPT test idbindex-openCursor.htm", (t) => { test("WPT test idbindex-openCursor.htm", (t) => {

View File

@ -1,5 +1,7 @@
import test, { ExecutionContext } from "ava"; import test, { ExecutionContext } from "ava";
import { indexeddb_test } from "./wptsupport.js"; import { indexeddb_test, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
async function t1(t: ExecutionContext, method: string): Promise<void> { async function t1(t: ExecutionContext, method: string): Promise<void> {
await indexeddb_test( await indexeddb_test(
@ -55,8 +57,6 @@ async function t2(t: ExecutionContext, method: string): Promise<void> {
done(); done();
}, 0); }, 0);
console.log(`queued task for ${method}`);
}, },
"t2", "t2",
); );

View File

@ -1,7 +1,9 @@
import test from "ava"; import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js"; import { BridgeIDBRequest } from "../bridge-idb.js";
import { IDBDatabase } from "../idbtypes.js"; import { IDBDatabase } from "../idbtypes.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.add() - add with an inline key // IDBObjectStore.add() - add with an inline key
test("WPT idbobjectstore_add.htm", async (t) => { test("WPT idbobjectstore_add.htm", async (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava"; import test from "ava";
import { BridgeIDBKeyRange } from "../bridge-idb.js"; import { BridgeIDBKeyRange } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.get() - key is a number // IDBObjectStore.get() - key is a number
test("WPT idbobjectstore_get.htm", (t) => { test("WPT idbobjectstore_get.htm", (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava"; import test from "ava";
import { BridgeIDBRequest } from "../bridge-idb.js"; import { BridgeIDBRequest } from "../bridge-idb.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBObjectStore.put() - put with an inline key // IDBObjectStore.put() - put with an inline key
test("WPT idbobjectstore_put.htm", (t) => { test("WPT idbobjectstore_put.htm", (t) => {

View File

@ -6,9 +6,12 @@ import {
createBooksStore, createBooksStore,
createDatabase, createDatabase,
createNotBooksStore, createNotBooksStore,
initTestIndexedDB,
migrateDatabase, migrateDatabase,
} from "./wptsupport.js"; } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IndexedDB: object store renaming support // IndexedDB: object store renaming support
// IndexedDB object store rename in new transaction // IndexedDB object store rename in new transaction
test("WPT idbobjectstore-rename-store.html (subtest 1)", async (t) => { test("WPT idbobjectstore-rename-store.html (subtest 1)", async (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// IDBTransaction - complete event // IDBTransaction - complete event
test("WPT idbtransaction-oncomplete.htm", async (t) => { test("WPT idbtransaction-oncomplete.htm", async (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { assert_key_equals, createdb } from "./wptsupport.js"; import { assert_key_equals, createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test keypath.htm", async (t) => { test("WPT test keypath.htm", async (t) => {
function keypath( function keypath(
@ -9,8 +11,6 @@ test("WPT test keypath.htm", async (t) => {
desc?: string, desc?: string,
) { ) {
return new Promise<void>((resolve, reject) => { return new Promise<void>((resolve, reject) => {
console.log("key path", keypath);
console.log("checking", desc);
let db: any; let db: any;
const store_name = "store-" + Date.now() + Math.random(); const store_name = "store-" + Date.now() + Math.random();

View File

@ -1,6 +1,8 @@
import test from "ava"; import test from "ava";
import { EventTarget } from "../idbtypes.js"; import { EventTarget } from "../idbtypes.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// Bubbling and capturing of request events // Bubbling and capturing of request events
test("WPT request_bubble-and-capture.htm", async (t) => { test("WPT request_bubble-and-capture.htm", async (t) => {

View File

@ -1,5 +1,7 @@
import test from "ava"; import test from "ava";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
// Transactions have a request queue // Transactions have a request queue
test("transaction-requestqueue.htm", async (t) => { test("transaction-requestqueue.htm", async (t) => {

View File

@ -1,6 +1,8 @@
import test from "ava"; import test from "ava";
import { IDBVersionChangeEvent } from "../idbtypes.js"; import { IDBVersionChangeEvent } from "../idbtypes.js";
import { createdb } from "./wptsupport.js"; import { createdb, initTestIndexedDB } from "./wptsupport.js";
test.before("test DB initialization", initTestIndexedDB);
test("WPT test value.htm, array", (t) => { test("WPT test value.htm, array", (t) => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@ -12,7 +14,6 @@ test("WPT test value.htm, array", (t) => {
createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) { createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) {
(e.target as any).result.createObjectStore("store").add(value, 1); (e.target as any).result.createObjectStore("store").add(value, 1);
(e.target as any).onsuccess = (e: any) => { (e.target as any).onsuccess = (e: any) => {
console.log("in first onsuccess");
e.target.result e.target.result
.transaction("store") .transaction("store")
.objectStore("store") .objectStore("store")
@ -35,13 +36,10 @@ test("WPT test value.htm, date", (t) => {
createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) { createdb(t).onupgradeneeded = function (e: IDBVersionChangeEvent) {
(e.target as any).result.createObjectStore("store").add(value, 1); (e.target as any).result.createObjectStore("store").add(value, 1);
(e.target as any).onsuccess = (e: any) => { (e.target as any).onsuccess = (e: any) => {
console.log("in first onsuccess");
e.target.result e.target.result
.transaction("store") .transaction("store")
.objectStore("store") .objectStore("store")
.get(1).onsuccess = (e: any) => { .get(1).onsuccess = (e: any) => {
console.log("target", e.target);
console.log("result", e.target.result);
t.assert(e.target.result instanceof _instanceof, "instanceof"); t.assert(e.target.result instanceof _instanceof, "instanceof");
resolve(); resolve();
}; };

View File

@ -1,5 +1,5 @@
import { ExecutionContext } from "ava"; import { ExecutionContext } from "ava";
import { BridgeIDBFactory, BridgeIDBRequest } from "../bridge-idb.js"; import { BridgeIDBRequest } from "../bridge-idb.js";
import { import {
IDBDatabase, IDBDatabase,
IDBIndex, IDBIndex,
@ -8,17 +8,10 @@ import {
IDBRequest, IDBRequest,
IDBTransaction, IDBTransaction,
} from "../idbtypes.js"; } from "../idbtypes.js";
import { MemoryBackend } from "../MemoryBackend.js"; import { initTestIndexedDB , useTestIndexedDb } from "../testingdb.js";
import { compareKeys } from "../util/cmp.js"; import { compareKeys } from "../util/cmp.js";
BridgeIDBFactory.enableTracing = true; export { initTestIndexedDB, useTestIndexedDb } from "../testingdb.js"
const backend = new MemoryBackend();
backend.enableTracing = true;
export const idbFactory = new BridgeIDBFactory(backend);
const self = {
indexedDB: idbFactory,
};
export function createdb( export function createdb(
t: ExecutionContext<unknown>, t: ExecutionContext<unknown>,
@ -27,8 +20,8 @@ export function createdb(
): IDBOpenDBRequest { ): IDBOpenDBRequest {
var rq_open: IDBOpenDBRequest; var rq_open: IDBOpenDBRequest;
dbname = dbname ? dbname : "testdb-" + new Date().getTime() + Math.random(); dbname = dbname ? dbname : "testdb-" + new Date().getTime() + Math.random();
if (version) rq_open = self.indexedDB.open(dbname, version); if (version) rq_open = useTestIndexedDb().open(dbname, version);
else rq_open = self.indexedDB.open(dbname); else rq_open = useTestIndexedDb().open(dbname);
return rq_open; return rq_open;
} }
@ -111,7 +104,7 @@ export async function migrateNamedDatabase(
migrationCallback: MigrationCallback, migrationCallback: MigrationCallback,
): Promise<IDBDatabase> { ): Promise<IDBDatabase> {
return new Promise<IDBDatabase>((resolve, reject) => { return new Promise<IDBDatabase>((resolve, reject) => {
const request = self.indexedDB.open(databaseName, newVersion); const request = useTestIndexedDb().open(databaseName, newVersion);
request.onupgradeneeded = (event: any) => { request.onupgradeneeded = (event: any) => {
const database = event.target.result; const database = event.target.result;
const transaction = event.target.transaction; const transaction = event.target.transaction;
@ -175,7 +168,7 @@ export async function createDatabase(
setupCallback: MigrationCallback, setupCallback: MigrationCallback,
): Promise<IDBDatabase> { ): Promise<IDBDatabase> {
const databaseName = makeDatabaseName(t.title); const databaseName = makeDatabaseName(t.title);
const request = self.indexedDB.deleteDatabase(databaseName); const request = useTestIndexedDb().deleteDatabase(databaseName);
return migrateNamedDatabase(t, databaseName, 1, setupCallback); return migrateNamedDatabase(t, databaseName, 1, setupCallback);
} }
@ -463,9 +456,9 @@ export function indexeddb_test(
options = Object.assign({ upgrade_will_abort: false }, options); options = Object.assign({ upgrade_will_abort: false }, options);
const dbname = const dbname =
"testdb-" + new Date().getTime() + Math.random() + (dbsuffix ?? ""); "testdb-" + new Date().getTime() + Math.random() + (dbsuffix ?? "");
var del = self.indexedDB.deleteDatabase(dbname); var del = useTestIndexedDb().deleteDatabase(dbname);
del.onerror = () => t.fail("deleteDatabase should succeed"); del.onerror = () => t.fail("deleteDatabase should succeed");
var open = self.indexedDB.open(dbname, 1); var open = useTestIndexedDb().open(dbname, 1);
open.onupgradeneeded = function () { open.onupgradeneeded = function () {
var db = open.result; var db = open.result;
t.teardown(function () { t.teardown(function () {
@ -474,7 +467,7 @@ export function indexeddb_test(
e.preventDefault(); e.preventDefault();
}; };
db.close(); db.close();
self.indexedDB.deleteDatabase(db.name); useTestIndexedDb().deleteDatabase(db.name);
}); });
var tx = open.transaction!; var tx = open.transaction!;
upgrade_func(resolve, db, tx, open); upgrade_func(resolve, db, tx, open);

View File

@ -0,0 +1,26 @@
import { BridgeIDBTransaction } from "./bridge-idb.js";
import { IDBRequest } from "./idbtypes.js";
/**
 * Wrap an IDBRequest into a promise that settles when the request does.
 *
 * Resolves with `request.result` once the request fires its success
 * event, and rejects with `request.error` if it fails.
 */
export function promiseFromRequest(request: IDBRequest): Promise<any> {
  return new Promise((resolve, reject) => {
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
}
/**
 * Wrap a BridgeIDBTransaction into a promise that settles when the
 * transaction finishes.
 *
 * Resolves (with no value) when the transaction completes, and rejects
 * when it errors.  Unlike the previous version, the rejection carries
 * `transaction.error` instead of `undefined`, so callers can see why
 * the transaction failed.
 */
export function promiseFromTransaction(
  transaction: BridgeIDBTransaction,
): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    transaction.oncomplete = () => {
      resolve();
    };
    transaction.onerror = () => {
      // Surface the underlying failure reason to the awaiting caller.
      reject(transaction.error);
    };
  });
}

View File

@ -19,48 +19,27 @@ and limitations under the License.
* Instead of ambient types, we export type declarations. * Instead of ambient types, we export type declarations.
*/ */
/**
* @public
*/
export type IDBKeyPath = string; export type IDBKeyPath = string;
/**
* @public
*/
export interface EventListener { export interface EventListener {
(evt: Event): void; (evt: Event): void;
} }
/**
* @public
*/
export interface EventListenerObject { export interface EventListenerObject {
handleEvent(evt: Event): void; handleEvent(evt: Event): void;
} }
/**
* @public
*/
export interface EventListenerOptions { export interface EventListenerOptions {
capture?: boolean; capture?: boolean;
} }
/**
* @public
*/
export interface AddEventListenerOptions extends EventListenerOptions { export interface AddEventListenerOptions extends EventListenerOptions {
once?: boolean; once?: boolean;
passive?: boolean; passive?: boolean;
} }
/**
* @public
*/
export type IDBTransactionMode = "readonly" | "readwrite" | "versionchange"; export type IDBTransactionMode = "readonly" | "readwrite" | "versionchange";
/**
* @public
*/
export type EventListenerOrEventListenerObject = export type EventListenerOrEventListenerObject =
| EventListener | EventListener
| EventListenerObject; | EventListenerObject;
@ -68,8 +47,6 @@ export type EventListenerOrEventListenerObject =
/** /**
* EventTarget is a DOM interface implemented by objects that can receive * EventTarget is a DOM interface implemented by objects that can receive
* events and may have listeners for them. * events and may have listeners for them.
*
* @public
*/ */
export interface EventTarget { export interface EventTarget {
/** /**

View File

@ -2,14 +2,10 @@ import {
Backend, Backend,
DatabaseConnection, DatabaseConnection,
DatabaseTransaction, DatabaseTransaction,
IndexProperties,
ObjectStoreProperties,
RecordGetRequest,
RecordGetResponse, RecordGetResponse,
RecordStoreRequest, RecordStoreRequest,
RecordStoreResponse, RecordStoreResponse,
ResultLevel, ResultLevel,
Schema,
StoreLevel, StoreLevel,
} from "./backend-interface.js"; } from "./backend-interface.js";
import { import {
@ -36,6 +32,9 @@ import {
} from "./MemoryBackend.js"; } from "./MemoryBackend.js";
import { Listener } from "./util/FakeEventTarget.js"; import { Listener } from "./util/FakeEventTarget.js";
export * from "./SqliteBackend.js";
export * from "./sqlite3-interface.js";
export * from "./idbtypes.js"; export * from "./idbtypes.js";
export { MemoryBackend } from "./MemoryBackend.js"; export { MemoryBackend } from "./MemoryBackend.js";
export type { AccessStats } from "./MemoryBackend.js"; export type { AccessStats } from "./MemoryBackend.js";
@ -55,21 +54,17 @@ export {
}; };
export type { export type {
DatabaseTransaction, DatabaseTransaction,
RecordGetRequest,
RecordGetResponse, RecordGetResponse,
Schema,
Backend, Backend,
DatabaseList, DatabaseList,
RecordStoreRequest, RecordStoreRequest,
RecordStoreResponse, RecordStoreResponse,
DatabaseConnection, DatabaseConnection,
ObjectStoreProperties,
RequestObj, RequestObj,
DatabaseDump, DatabaseDump,
ObjectStoreDump, ObjectStoreDump,
IndexRecord, IndexRecord,
ObjectStoreRecord, ObjectStoreRecord,
IndexProperties,
MemoryBackendDump, MemoryBackendDump,
Event, Event,
Listener, Listener,

View File

@ -0,0 +1,84 @@
/*
This file is part of GNU Taler
(C) 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
// @ts-ignore: optional dependency
import type Database from "better-sqlite3";
import {
ResultRow,
Sqlite3Interface,
Sqlite3Statement,
} from "./sqlite3-interface.js";
/**
 * Create a {@link Sqlite3Interface} implementation backed by the optional
 * better-sqlite3 native bindings.
 *
 * The dependency is imported dynamically so that environments without
 * better-sqlite3 installed can still load this module's package; the
 * import only fails when this factory is actually invoked.
 *
 * @returns an adapter exposing open/prepare/run/exec over better-sqlite3
 */
export async function createNodeSqlite3Impl(): Promise<Sqlite3Interface> {
  // @ts-ignore: optional dependency
  const bsq = (await import("better-sqlite3")).default;

  return {
    open(filename: string) {
      const internalDbHandle = bsq(filename);
      return {
        internalDbHandle,
        close() {
          internalDbHandle.close();
        },
        prepare(stmtStr): Sqlite3Statement {
          const stmtHandle = internalDbHandle.prepare(stmtStr);
          return {
            internalStatement: stmtHandle,
            getAll(params): ResultRow[] {
              // better-sqlite3 rejects an explicit `undefined` argument,
              // so only forward params when they are present.
              let res: ResultRow[];
              if (params === undefined) {
                res = stmtHandle.all() as ResultRow[];
              } else {
                res = stmtHandle.all(params) as ResultRow[];
              }
              return res;
            },
            getFirst(params): ResultRow | undefined {
              let res: ResultRow | undefined;
              if (params === undefined) {
                res = stmtHandle.get() as ResultRow | undefined;
              } else {
                res = stmtHandle.get(params) as ResultRow | undefined;
              }
              return res;
            },
            run(params) {
              // The better-sqlite3 library doesn't like it when we pass
              // undefined directly, so branch on the argument's presence.
              // (A previously-built `myParams` array here was dead code
              // and has been removed.)
              let res: Database.RunResult;
              if (params !== undefined) {
                res = stmtHandle.run(params);
              } else {
                res = stmtHandle.run();
              }
              return {
                lastInsertRowid: res.lastInsertRowid,
              };
            },
          };
        },
        exec(sqlStr): void {
          internalDbHandle.exec(sqlStr);
        },
      };
    },
  };
}

View File

@ -0,0 +1,34 @@
/**
 * Handle to an open sqlite3 database connection.
 */
export type Sqlite3Database = {
  // Underlying handle of the concrete sqlite3 binding; opaque to callers.
  internalDbHandle: any;
  // Execute one or more SQL statements without returning rows.
  exec(sqlStr: string): void;
  // Compile a single SQL statement for (repeated) execution.
  prepare(stmtStr: string): Sqlite3Statement;
  // Close the connection and release its resources.
  close(): void;
};

/**
 * A prepared SQL statement that can be executed with optional
 * named bind parameters.
 */
export type Sqlite3Statement = {
  // Underlying statement handle of the concrete binding; opaque to callers.
  internalStatement: any;
  // Execute a statement that does not return rows (INSERT/UPDATE/...).
  run(params?: BindParams): RunResult;
  // Execute a query and return all result rows.
  getAll(params?: BindParams): ResultRow[];
  // Execute a query and return only the first row, if any.
  getFirst(params?: BindParams): ResultRow | undefined;
};

/**
 * Outcome of a non-query statement execution.
 */
export interface RunResult {
  // Rowid of the most recently inserted row on this connection.
  lastInsertRowid: number | bigint;
}

// Value types that sqlite3 can store/bind directly.
export type Sqlite3Value = string | Uint8Array | number | null | bigint;

// Named bind parameters; `undefined` entries are skipped by adapters.
export type BindParams = Record<string, Sqlite3Value | undefined>;

// One result row, keyed by column name.
export type ResultRow = Record<string, Sqlite3Value>;

/**
 * Common interface that multiple sqlite3 bindings
 * (such as better-sqlite3 or qtart's sqlite3 bindings)
 * can adapt to.
 *
 * This does not expose full sqlite3 functionality, but just enough
 * to be used by our IndexedDB sqlite3 backend.
 */
export interface Sqlite3Interface {
  open(filename: string): Sqlite3Database;
}

View File

@ -0,0 +1,43 @@
/*
Copyright 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
import { createSqliteBackend } from "./SqliteBackend.js";
import { BridgeIDBFactory } from "./bridge-idb.js";
import { IDBFactory } from "./idbtypes.js";
import { createNodeSqlite3Impl } from "./node-sqlite3-impl.js";
let idbFactory: IDBFactory | undefined = undefined;
export async function initTestIndexedDB(): Promise<void> {
// const backend = new MemoryBackend();
// backend.enableTracing = true;
const sqlite3Impl = await createNodeSqlite3Impl();
const backend = await createSqliteBackend(sqlite3Impl, {
filename: ":memory:",
});
idbFactory = new BridgeIDBFactory(backend);
backend.enableTracing = true;
BridgeIDBFactory.enableTracing = false;
}
/**
 * Get the IndexedDB factory for tests.
 *
 * @throws if initTestIndexedDB() has not been awaited yet.
 */
export function useTestIndexedDb(): IDBFactory {
  if (idbFactory === undefined) {
    throw Error("indexeddb factory not initialized");
  }
  return idbFactory;
}

View File

@ -0,0 +1,103 @@
/*
Copyright 2017 Jeremy Scheff
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing
permissions and limitations under the License.
*/
import FakeEventTarget from "./FakeEventTarget.js";
import { Event, EventTarget } from "../idbtypes.js";
/**
 * Event types dispatched by the bridge IndexedDB implementation.
 *
 * @public
 */
export type EventType =
  | "abort"
  | "blocked"
  | "complete"
  | "error"
  | "success"
  | "upgradeneeded"
  | "versionchange";
/**
 * Minimal DOM Event implementation used by the bridge IndexedDB
 * implementation.  Only the subset needed internally is functional;
 * the remaining Event methods throw.
 */
export class FakeDomEvent implements Event {
  public eventPath: FakeEventTarget[] = [];
  public type: EventType;

  // Event phase constants required by the DOM Event interface.
  public readonly NONE = 0;
  public readonly CAPTURING_PHASE = 1;
  public readonly AT_TARGET = 2;
  public readonly BUBBLING_PHASE = 3;

  // Dispatch state flags.
  public propagationStopped = false;
  public immediatePropagationStopped = false;
  public canceled = false;
  public initialized = true;
  public dispatched = false;

  public target: FakeEventTarget | null = null;
  public currentTarget: FakeEventTarget | null = null;

  public eventPhase: 0 | 1 | 2 | 3 = 0;

  public defaultPrevented = false;

  public isTrusted = false;
  public timeStamp = Date.now();

  public bubbles: boolean;
  public cancelable: boolean;

  cancelBubble: boolean = false;
  composed: boolean = false;
  returnValue: boolean = false;

  constructor(
    type: EventType,
    eventInitDict: { bubbles?: boolean; cancelable?: boolean } = {},
  ) {
    this.type = type;
    // Destructuring defaults fire only when the property is undefined,
    // matching the DOM behavior of EventInit dictionaries.
    const { bubbles = false, cancelable = false } = eventInitDict;
    this.bubbles = bubbles;
    this.cancelable = cancelable;
  }

  get srcElement(): EventTarget | null {
    return this.target;
  }

  composedPath(): EventTarget[] {
    throw new Error("Method not implemented.");
  }

  initEvent(
    type: string,
    bubbles?: boolean | undefined,
    cancelable?: boolean | undefined,
  ): void {
    throw new Error("Method not implemented.");
  }

  public preventDefault() {
    // Only cancelable events can be canceled.
    if (this.cancelable) {
      this.canceled = true;
    }
  }

  public stopPropagation() {
    this.propagationStopped = true;
  }

  public stopImmediatePropagation() {
    this.propagationStopped = true;
    this.immediatePropagationStopped = true;
  }
}

export default FakeDomEvent;

View File

@ -180,7 +180,7 @@ abstract class FakeEventTarget implements EventTarget {
fe.eventPath.reverse(); fe.eventPath.reverse();
fe.eventPhase = event.BUBBLING_PHASE; fe.eventPhase = event.BUBBLING_PHASE;
if (fe.eventPath.length === 0 && event.type === "error") { if (fe.eventPath.length === 0 && event.type === "error") {
console.error("Unhandled error event: ", event.target); console.error("Unhandled error event on target: ", event.target);
} }
for (const obj of event.eventPath) { for (const obj of event.eventPath) {
if (!event.propagationStopped) { if (!event.propagationStopped) {

View File

@ -19,7 +19,11 @@ import { IDBKeyPath, IDBValidKey } from "../idbtypes.js";
import { valueToKey } from "./valueToKey.js"; import { valueToKey } from "./valueToKey.js";
// http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#dfn-steps-for-extracting-a-key-from-a-value-using-a-key-path // http://www.w3.org/TR/2015/REC-IndexedDB-20150108/#dfn-steps-for-extracting-a-key-from-a-value-using-a-key-path
/**
* Algorithm to "extract a key from a value using a key path".
*/
export const extractKey = (keyPath: IDBKeyPath | IDBKeyPath[], value: any) => { export const extractKey = (keyPath: IDBKeyPath | IDBKeyPath[], value: any) => {
//console.log(`extracting key ${JSON.stringify(keyPath)} from ${JSON.stringify(value)}`);
if (Array.isArray(keyPath)) { if (Array.isArray(keyPath)) {
const result: IDBValidKey[] = []; const result: IDBValidKey[] = [];

View File

@ -0,0 +1,39 @@
/*
This file is part of GNU Taler
(C) 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
import test, { ExecutionContext } from "ava";
import { deserializeKey, serializeKey } from "./key-storage.js";
import { IDBValidKey } from "../idbtypes.js";
/**
 * Check that a key survives a serialize/deserialize round-trip.
 */
function checkKeySer(t: ExecutionContext, k: IDBValidKey): void {
  const encoded = serializeKey(k);
  const decoded = deserializeKey(encoded);
  t.deepEqual(k, decoded);
}

test("basics", (t) => {
  const samples: IDBValidKey[] = [
    "foo",
    "foo\0bar",
    "foo\u1000bar",
    "foo\u2000bar",
    "foo\u5000bar",
    "foo\uffffbar",
    42,
    255,
    254,
    [1, 2, 3, 4],
    [[[1], 3], [4]],
  ];
  for (const k of samples) {
    checkKeySer(t, k);
  }
});

View File

@ -0,0 +1,363 @@
/*
This file is part of GNU Taler
(C) 2023 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
/*
Encoding rules (inspired by Firefox, but slightly simplified):

Each encoded key starts with a one-byte type tag (see the tag*
constants below for the concrete values):

Numbers:  tagNum    n n n n n n n n
Dates:    tagDate   n n n n n n n n
Strings:  tagString s s s s ... 0
Binaries: tagBinary s s s s ... 0
Arrays:   tagArray  i i i ... 0

Numbers/dates are encoded as 64-bit IEEE 754 floats with the sign bit
flipped, in order to make them sortable.
*/
/**
* Imports.
*/
import { IDBValidKey } from "../idbtypes.js";
// Type tags for serialized keys.  Each tag must be distinct, since the
// deserializer dispatches on the tag byte.
const tagNum = 0xa0;
const tagDate = 0xb0;
const tagString = 0xc0;
// Fix: was 0xc0, colliding with tagString.  A shared tag made binary
// keys indistinguishable from string keys on deserialization and left
// the deserializer's binary case unreachable (duplicate case label).
const tagBinary = 0xd0;
const tagArray = 0xe0;

// Values up to oneByteMax are stored in one byte (shifted up by
// oneByteOffset so that 0x00 stays free as a terminator); larger values
// use the two- or three-byte form marked by the masks below.
const oneByteOffset = 0x01;
const twoByteOffset = 0x7f;
const oneByteMax = 0x7e;
const twoByteMax = 0x3fff + twoByteOffset;
const twoByteMask = 0b1000_0000;
const threeByteMask = 0b1100_0000;

/**
 * Number of bytes that the variable-length encoding of the code unit /
 * byte value c will occupy (see writeEnc).
 */
export function countEncSize(c: number): number {
  if (c > twoByteMax) {
    return 3;
  }
  if (c > oneByteMax) {
    return 2;
  }
  return 1;
}
/**
 * Write the variable-length encoding of a single code unit / byte value
 * c at the given offset and return the number of bytes written (1, 2 or
 * 3).  The decoders in internalReadString and internalReadBytes must be
 * kept in sync with this encoder.
 *
 * The first byte of each form carries a distinguishing high-bit mask,
 * and 0x00 never appears in the output (it is used as a terminator).
 */
export function writeEnc(dv: DataView, offset: number, c: number): number {
  if (c > twoByteMax) {
    // Three-byte form: high bits in the first byte (tagged with
    // threeByteMask), middle bits in the second byte, low two bits in
    // the top of the third byte.  NOTE(review): setUint8 truncates its
    // argument to 8 bits, so (c & 0xff) << 6 effectively stores
    // (c & 0x3) << 6; the decoders compensate for this — verify when
    // changing either side.
    dv.setUint8(offset + 2, (c & 0xff) << 6);
    dv.setUint8(offset + 1, (c >>> 2) & 0xff);
    dv.setUint8(offset, threeByteMask | (c >>> 10));
    return 3;
  } else if (c > oneByteMax) {
    // Two-byte form: value biased by twoByteOffset, high bits tagged
    // with twoByteMask.
    c -= twoByteOffset;
    dv.setUint8(offset + 1, c & 0xff);
    dv.setUint8(offset, (c >>> 8) | twoByteMask);
    return 2;
  } else {
    // One-byte form, shifted up by oneByteOffset to keep 0x00 free.
    c += oneByteOffset;
    dv.setUint8(offset, c);
    return 1;
  }
}
export function internalSerializeString(
dv: DataView,
offset: number,
key: string,
): number {
dv.setUint8(offset, tagString);
let n = 1;
for (let i = 0; i < key.length; i++) {
let c = key.charCodeAt(i);
n += writeEnc(dv, offset + n, c);
}
// Null terminator
dv.setUint8(offset + n, 0);
n++;
return n;
}
export function countSerializeKey(key: IDBValidKey): number {
if (typeof key === "number") {
return 9;
}
if (key instanceof Date) {
return 9;
}
if (key instanceof ArrayBuffer) {
let len = 2;
const uv = new Uint8Array(key);
for (let i = 0; i < uv.length; i++) {
len += countEncSize(uv[i]);
}
return len;
}
if (ArrayBuffer.isView(key)) {
let len = 2;
const uv = new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
for (let i = 0; i < uv.length; i++) {
len += countEncSize(uv[i]);
}
return len;
}
if (typeof key === "string") {
let len = 2;
for (let i = 0; i < key.length; i++) {
len += countEncSize(key.charCodeAt(i));
}
return len;
}
if (Array.isArray(key)) {
let len = 2;
for (let i = 0; i < key.length; i++) {
len += countSerializeKey(key[i]);
}
return len;
}
throw Error("unsupported type for key");
}
/**
 * Serialize a number or date timestamp: the given type tag, followed by
 * the value as a big-endian IEEE 754 double with the sign bit flipped
 * (see the encoding notes at the top of the file).
 *
 * Returns the number of bytes written (always 9).
 */
function internalSerializeNumeric(
  dv: DataView,
  offset: number,
  tag: number,
  val: number,
): number {
  // Fix: the tag parameter was previously ignored and tagNum was always
  // written, so dates were serialized with the number tag and came back
  // from deserialization as plain numbers.
  dv.setUint8(offset, tag);
  dv.setFloat64(offset + 1, val);
  // Flip sign bit of the float's first byte.
  let b = dv.getUint8(offset + 1);
  b ^= 0x80;
  dv.setUint8(offset + 1, b);
  return 9;
}
/**
 * Serialize an array: tag byte, the serialization of each element, then
 * a 0x00 terminator.  Returns the number of bytes written.
 */
function internalSerializeArray(
  dv: DataView,
  offset: number,
  key: any[],
): number {
  dv.setUint8(offset, tagArray);
  let written = 1;
  for (const element of key) {
    written += internalSerializeKey(element, dv, offset + written);
  }
  // Terminator marks the end of the array.
  dv.setUint8(offset + written, 0);
  return written + 1;
}
function internalSerializeBinary(
dv: DataView,
offset: number,
key: Uint8Array,
): number {
dv.setUint8(offset, tagBinary);
let n = 1;
for (let i = 0; i < key.length; i++) {
n += internalSerializeKey(key[i], dv, offset + n);
}
dv.setUint8(offset + n, 0);
n++;
return n;
}
/**
 * Serialize a key at the given offset, dispatching on its type.
 * Returns the number of bytes written.
 *
 * @throws for values that are not supported key types.
 */
function internalSerializeKey(
  key: IDBValidKey,
  dv: DataView,
  offset: number,
): number {
  if (typeof key === "number") {
    return internalSerializeNumeric(dv, offset, tagNum, key);
  }
  if (key instanceof Date) {
    // Fix: was key.getDate(), which returns the day of the month (1-31)
    // rather than the timestamp.  The deserializer reconstructs the date
    // with "new Date(num)", which expects milliseconds since the epoch,
    // so getTime() is the correct counterpart.
    return internalSerializeNumeric(dv, offset, tagDate, key.getTime());
  }
  if (typeof key === "string") {
    return internalSerializeString(dv, offset, key);
  }
  if (Array.isArray(key)) {
    return internalSerializeArray(dv, offset, key);
  }
  if (key instanceof ArrayBuffer) {
    return internalSerializeBinary(dv, offset, new Uint8Array(key));
  }
  if (ArrayBuffer.isView(key)) {
    const uv = new Uint8Array(key.buffer, key.byteOffset, key.byteLength);
    return internalSerializeBinary(dv, offset, uv);
  }
  throw Error("unsupported type for key");
}
/**
 * Serialize an IndexedDB key into a byte string whose byte-wise
 * lexicographic order matches key order.
 *
 * Trailing zero bytes are stripped from the result; the deserializer
 * logically re-pads them (see getFloat64Trunc and the terminator
 * handling in the readers), which keeps encoded keys short.
 */
export function serializeKey(key: IDBValidKey): Uint8Array {
  const len = countSerializeKey(key);
  let buf = new Uint8Array(len);
  const outLen = internalSerializeKey(key, new DataView(buf.buffer), 0);
  if (len != outLen) {
    throw Error("internal invariant failed");
  }
  // Strip trailing zero bytes.
  let end = buf.length;
  while (end > 0 && buf[end - 1] == 0) {
    end--;
  }
  if (end < buf.length) {
    buf = buf.slice(0, end);
  }
  return buf;
}
/**
 * Decode a string encoded by internalSerializeString, starting right
 * after the tag byte.  Returns the offset of the first byte after the
 * string and the decoded string.
 *
 * The buffer may have been truncated (serializeKey strips trailing zero
 * bytes), so running off the end of the view is treated like hitting
 * the 0x00 terminator.
 */
function internalReadString(dv: DataView, offset: number): [number, string] {
  const chars: string[] = [];
  while (offset < dv.byteLength) {
    const v = dv.getUint8(offset);
    if (v == 0) {
      // Got end-of-string.
      offset += 1;
      break;
    }
    let c: number;
    if ((v & threeByteMask) === threeByteMask) {
      // Three-byte form.  NOTE(review): the mask bits of b1 are not
      // stripped before shifting, so c carries extra bits above bit 15;
      // String.fromCharCode truncates to 16 bits below, which
      // compensates.  Keep in sync with writeEnc.
      const b1 = v;
      const b2 = dv.getUint8(offset + 1);
      const b3 = dv.getUint8(offset + 2);
      c = (b1 << 10) | (b2 << 2) | (b3 >> 6);
      offset += 3;
    } else if ((v & twoByteMask) === twoByteMask) {
      // Two-byte form: undo the mask and the twoByteOffset bias.
      const b1 = v & ~twoByteMask;
      const b2 = dv.getUint8(offset + 1);
      c = ((b1 << 8) | b2) + twoByteOffset;
      offset += 2;
    } else {
      // One-byte form: undo the oneByteOffset shift.
      c = v - oneByteOffset;
      offset += 1;
    }
    chars.push(String.fromCharCode(c));
  }
  return [offset, chars.join("")];
}
/**
 * Decode binary data encoded byte-by-byte with writeEnc, starting right
 * after the tag byte.  Returns the offset of the first byte after the
 * data and the decoded bytes.
 *
 * The buffer may have been truncated (serializeKey strips trailing zero
 * bytes), so running off the end of the view is treated like hitting
 * the 0x00 terminator.
 */
function internalReadBytes(dv: DataView, offset: number): [number, Uint8Array] {
  // First pass: count the encoded bytes up to the 0x00 terminator (or
  // the end of the truncated buffer).  This over-estimates the decoded
  // length when multi-byte encodings are present, so the result is
  // trimmed to the actual decoded length below.
  let count = 0;
  while (offset + count < dv.byteLength) {
    const v = dv.getUint8(offset + count);
    if (v === 0) {
      break;
    }
    count++;
  }
  let writePos = 0;
  const bytes = new Uint8Array(count);
  while (offset < dv.byteLength) {
    const v = dv.getUint8(offset);
    if (v == 0) {
      offset += 1;
      break;
    }
    let c: number;
    if ((v & threeByteMask) === threeByteMask) {
      // Three-byte form (kept in sync with writeEnc).
      const b1 = v;
      const b2 = dv.getUint8(offset + 1);
      const b3 = dv.getUint8(offset + 2);
      c = (b1 << 10) | (b2 << 2) | (b3 >> 6);
      offset += 3;
    } else if ((v & twoByteMask) === twoByteMask) {
      // Two-byte form: undo the mask and the twoByteOffset bias.
      const b1 = v & ~twoByteMask;
      const b2 = dv.getUint8(offset + 1);
      c = ((b1 << 8) | b2) + twoByteOffset;
      offset += 2;
    } else {
      // One-byte form: undo the oneByteOffset shift.
      c = v - oneByteOffset;
      offset += 1;
    }
    bytes[writePos] = c;
    writePos++;
  }
  // Fix: previously the full count-sized buffer was returned, leaving
  // spurious trailing zero bytes whenever any input byte used a
  // multi-byte encoding (writePos < count).
  return [offset, writePos === count ? bytes : bytes.slice(0, writePos)];
}
/**
 * Same as DataView.getFloat64, but logically pad the input with zeroes
 * on the right if the 8-byte read would run past the end of the view.
 *
 * This allows reading from buffers where trailing zeroes have been
 * truncated (see serializeKey).
 */
function getFloat64Trunc(dv: DataView, offset: number): number {
  // Fast path: all 8 bytes are available in the view.
  if (offset + 7 < dv.byteLength) {
    return dv.getFloat64(offset);
  }
  // Slow path: copy the available bytes into a zero-filled scratch
  // buffer and read the float from there.
  const scratch = new Uint8Array(8);
  const available = new Uint8Array(
    dv.buffer,
    dv.byteOffset + offset,
    dv.byteLength - offset,
  );
  scratch.set(available);
  return new DataView(scratch.buffer).getFloat64(0);
}
/**
 * Deserialize the key starting at the given offset.  Returns the offset
 * of the first byte after the key and the decoded key.
 *
 * The input may have had its trailing zero bytes truncated by
 * serializeKey, hence the clamping against dv.byteLength.
 */
function internalDeserializeKey(
  dv: DataView,
  offset: number,
): [number, IDBValidKey] {
  let tag = dv.getUint8(offset);
  switch (tag) {
    case tagNum: {
      // Negation undoes the sign-bit flip applied when serializing.
      const num = -getFloat64Trunc(dv, offset + 1);
      const newOffset = Math.min(offset + 9, dv.byteLength);
      return [newOffset, num];
    }
    case tagDate: {
      const num = -getFloat64Trunc(dv, offset + 1);
      const newOffset = Math.min(offset + 9, dv.byteLength);
      return [newOffset, new Date(num)];
    }
    case tagString: {
      return internalReadString(dv, offset + 1);
    }
    case tagBinary: {
      // NOTE(review): if tagBinary ever shares its value with tagString,
      // this case becomes an unreachable duplicate case label — verify
      // the tag constants are distinct.
      return internalReadBytes(dv, offset + 1);
    }
    case tagArray: {
      // Elements follow until a 0x00 terminator (or the end of the
      // truncated buffer).
      const arr: any[] = [];
      offset += 1;
      while (offset < dv.byteLength) {
        const innerTag = dv.getUint8(offset);
        if (innerTag === 0) {
          offset++;
          break;
        }
        const [innerOff, innerVal] = internalDeserializeKey(dv, offset);
        arr.push(innerVal);
        offset = innerOff;
      }
      return [offset, arr];
    }
    default:
      throw Error("invalid key (unrecognized tag)");
  }
}
/**
 * Deserialize a key produced by serializeKey.
 *
 * @throws if the buffer contains trailing garbage or an unknown tag.
 */
export function deserializeKey(encodedKey: Uint8Array): IDBValidKey {
  const dv = new DataView(
    encodedKey.buffer,
    encodedKey.byteOffset,
    encodedKey.byteLength,
  );
  const [finalOffset, result] = internalDeserializeKey(dv, 0);
  if (finalOffset != encodedKey.byteLength) {
    throw Error("internal invariant failed");
  }
  return result;
}

View File

@ -20,55 +20,73 @@ import { makeStoreKeyValue } from "./makeStoreKeyValue.js";
test("basics", (t) => { test("basics", (t) => {
let result; let result;
result = makeStoreKeyValue({ name: "Florian" }, undefined, 42, true, "id"); result = makeStoreKeyValue({
value: { name: "Florian" },
key: undefined,
currentKeyGenerator: 42,
autoIncrement: true,
keyPath: "id",
});
t.is(result.updatedKeyGenerator, 43); t.is(result.updatedKeyGenerator, 43);
t.is(result.key, 42); t.is(result.key, 42);
t.is(result.value.name, "Florian"); t.is(result.value.name, "Florian");
t.is(result.value.id, 42); t.is(result.value.id, 42);
result = makeStoreKeyValue( result = makeStoreKeyValue({
{ name: "Florian", id: 10 }, value: { name: "Florian", id: 10 },
undefined, key: undefined,
5, currentKeyGenerator: 5,
true, autoIncrement: true,
"id", keyPath: "id",
); });
t.is(result.updatedKeyGenerator, 11); t.is(result.updatedKeyGenerator, 11);
t.is(result.key, 10); t.is(result.key, 10);
t.is(result.value.name, "Florian"); t.is(result.value.name, "Florian");
t.is(result.value.id, 10); t.is(result.value.id, 10);
result = makeStoreKeyValue( result = makeStoreKeyValue({
{ name: "Florian", id: 5 }, value: { name: "Florian", id: 5 },
undefined, key: undefined,
10, currentKeyGenerator: 10,
true, autoIncrement: true,
"id", keyPath: "id",
); });
t.is(result.updatedKeyGenerator, 10); t.is(result.updatedKeyGenerator, 10);
t.is(result.key, 5); t.is(result.key, 5);
t.is(result.value.name, "Florian"); t.is(result.value.name, "Florian");
t.is(result.value.id, 5); t.is(result.value.id, 5);
result = makeStoreKeyValue( result = makeStoreKeyValue({
{ name: "Florian", id: "foo" }, value: { name: "Florian", id: "foo" },
undefined, key: undefined,
10, currentKeyGenerator: 10,
true, autoIncrement: true,
"id", keyPath: "id",
); });
t.is(result.updatedKeyGenerator, 10); t.is(result.updatedKeyGenerator, 10);
t.is(result.key, "foo"); t.is(result.key, "foo");
t.is(result.value.name, "Florian"); t.is(result.value.name, "Florian");
t.is(result.value.id, "foo"); t.is(result.value.id, "foo");
result = makeStoreKeyValue({ name: "Florian" }, "foo", 10, true, null); result = makeStoreKeyValue({
value: { name: "Florian" },
key: "foo",
currentKeyGenerator: 10,
autoIncrement: true,
keyPath: null,
});
t.is(result.updatedKeyGenerator, 10); t.is(result.updatedKeyGenerator, 10);
t.is(result.key, "foo"); t.is(result.key, "foo");
t.is(result.value.name, "Florian"); t.is(result.value.name, "Florian");
t.is(result.value.id, undefined); t.is(result.value.id, undefined);
result = makeStoreKeyValue({ name: "Florian" }, undefined, 10, true, null); result = makeStoreKeyValue({
value: { name: "Florian" },
key: undefined,
currentKeyGenerator: 10,
autoIncrement: true,
keyPath: null,
});
t.is(result.updatedKeyGenerator, 11); t.is(result.updatedKeyGenerator, 11);
t.is(result.key, 10); t.is(result.key, 10);
t.is(result.value.name, "Florian"); t.is(result.value.name, "Florian");

View File

@ -75,19 +75,25 @@ function injectKey(
return newValue; return newValue;
} }
export function makeStoreKeyValue( export interface MakeStoreKvRequest {
value: any, value: any;
key: IDBValidKey | undefined, key: IDBValidKey | undefined;
currentKeyGenerator: number, currentKeyGenerator: number;
autoIncrement: boolean, autoIncrement: boolean;
keyPath: IDBKeyPath | IDBKeyPath[] | null, keyPath: IDBKeyPath | IDBKeyPath[] | null;
): StoreKeyResult { }
export function makeStoreKeyValue(req: MakeStoreKvRequest): StoreKeyResult {
const { keyPath, currentKeyGenerator, autoIncrement } = req;
let { key, value } = req;
const haveKey = key !== null && key !== undefined; const haveKey = key !== null && key !== undefined;
const haveKeyPath = keyPath !== null && keyPath !== undefined; const haveKeyPath = keyPath !== null && keyPath !== undefined;
// This models a decision table on (haveKey, haveKeyPath, autoIncrement) // This models a decision table on (haveKey, haveKeyPath, autoIncrement)
try { try {
// FIXME: Perf: only do this if we need to inject something.
value = structuredClone(value); value = structuredClone(value);
} catch (e) { } catch (e) {
throw new DataCloneError(); throw new DataCloneError();

View File

@ -14,6 +14,11 @@
permissions and limitations under the License. permissions and limitations under the License.
*/ */
/**
* Queue a task to be executed *after* the microtask
* queue has been processed, but *before* subsequent setTimeout / setImmediate
* tasks.
*/
export function queueTask(fn: () => void) { export function queueTask(fn: () => void) {
let called = false; let called = false;
const callFirst = () => { const callFirst = () => {

View File

@ -15,7 +15,11 @@
*/ */
import test, { ExecutionContext } from "ava"; import test, { ExecutionContext } from "ava";
import { structuredClone } from "./structuredClone.js"; import {
structuredClone,
structuredEncapsulate,
structuredRevive,
} from "./structuredClone.js";
function checkClone(t: ExecutionContext, x: any): void { function checkClone(t: ExecutionContext, x: any): void {
t.deepEqual(structuredClone(x), x); t.deepEqual(structuredClone(x), x);
@ -59,3 +63,58 @@ test("structured clone (object cycles)", (t) => {
const obj1Clone = structuredClone(obj1); const obj1Clone = structuredClone(obj1);
t.is(obj1Clone, obj1Clone.c); t.is(obj1Clone, obj1Clone.c);
}); });
test("encapsulate", (t) => {
t.deepEqual(structuredEncapsulate(42), 42);
t.deepEqual(structuredEncapsulate(true), true);
t.deepEqual(structuredEncapsulate(false), false);
t.deepEqual(structuredEncapsulate(null), null);
t.deepEqual(structuredEncapsulate(undefined), { $: "undef" });
t.deepEqual(structuredEncapsulate(42n), { $: "bigint", val: "42" });
t.deepEqual(structuredEncapsulate(new Date(42)), { $: "date", val: 42 });
t.deepEqual(structuredEncapsulate({ x: 42 }), { x: 42 });
t.deepEqual(structuredEncapsulate({ $: "bla", x: 42 }), {
$: "obj",
val: { $: "bla", x: 42 },
});
const x = { foo: 42, bar: {} } as any;
x.bar.baz = x;
t.deepEqual(structuredEncapsulate(x), {
foo: 42,
bar: {
baz: { $: "ref", d: 2, p: [] },
},
});
});
test("revive", (t) => {
t.deepEqual(structuredRevive(42), 42);
t.deepEqual(structuredRevive([1, 2, 3]), [1, 2, 3]);
t.deepEqual(structuredRevive(true), true);
t.deepEqual(structuredRevive(false), false);
t.deepEqual(structuredRevive(null), null);
t.deepEqual(structuredRevive({ $: "undef" }), undefined);
t.deepEqual(structuredRevive({ x: { $: "undef" } }), { x: undefined });
t.deepEqual(structuredRevive({ $: "date", val: 42}), new Date(42));
{
const x = { foo: 42, bar: {} } as any;
x.bar.baz = x;
const r = {
foo: 42,
bar: {
baz: { $: "ref", d: 2, p: [] },
},
};
t.deepEqual(structuredRevive(r), x);
}
});

View File

@ -16,22 +16,21 @@
/** /**
* Encoding (new, compositional version): * Encoding (new, compositional version):
* *
* Encapsulate object that itself might contain a "$" field: * Encapsulate object that itself might contain a "$" field:
* { $: { E... } } * { $: "obj", val: ... }
* (Outer level only:) Wrap other values into object
* { $: "lit", val: ... }
* Circular reference: * Circular reference:
* { $: ["ref", uplevel, field...] } * { $: "ref" l: uplevel, p: path }
* Date: * Date:
* { $: ["data"], val: datestr } * { $: "date", val: datestr }
* Bigint: * Bigint:
* { $: ["bigint"], val: bigintstr } * { $: "bigint", val: bigintstr }
* Array with special (non-number) attributes: * Array with special (non-number) attributes:
* { $: ["array"], val: arrayobj } * { $: "array", val: arrayobj }
* Undefined field * Undefined field
* { $: "undef" } * { $: "undef" }
*
* Legacy (top-level only), for backwards compatibility:
* { $types: [...] }
*/ */
/** /**
@ -261,22 +260,18 @@ export function mkDeepCloneCheckOnly() {
function internalEncapsulate( function internalEncapsulate(
val: any, val: any,
outRoot: any,
path: string[], path: string[],
memo: Map<any, string[]>, memo: Map<any, string[]>,
types: Array<[string[], string]>,
): any { ): any {
const memoPath = memo.get(val); const memoPath = memo.get(val);
if (memoPath) { if (memoPath) {
types.push([path, "ref"]); return { $: "ref", d: path.length, p: memoPath };
return memoPath;
} }
if (val === null) { if (val === null) {
return null; return null;
} }
if (val === undefined) { if (val === undefined) {
types.push([path, "undef"]); return { $: "undef" };
return 0;
} }
if (Array.isArray(val)) { if (Array.isArray(val)) {
memo.set(val, path); memo.set(val, path);
@ -289,31 +284,33 @@ function internalEncapsulate(
break; break;
} }
} }
if (special) {
types.push([path, "array"]);
}
for (const x in val) { for (const x in val) {
const p = [...path, x]; const p = [...path, x];
outArr[x] = internalEncapsulate(val[x], outRoot, p, memo, types); outArr[x] = internalEncapsulate(val[x], p, memo);
}
if (special) {
return { $: "array", val: outArr };
} else {
return outArr;
} }
return outArr;
} }
if (val instanceof Date) { if (val instanceof Date) {
types.push([path, "date"]); return { $: "date", val: val.getTime() };
return val.getTime();
} }
if (isUserObject(val) || isPlainObject(val)) { if (isUserObject(val) || isPlainObject(val)) {
memo.set(val, path); memo.set(val, path);
const outObj: any = {}; const outObj: any = {};
for (const x in val) { for (const x in val) {
const p = [...path, x]; const p = [...path, x];
outObj[x] = internalEncapsulate(val[x], outRoot, p, memo, types); outObj[x] = internalEncapsulate(val[x], p, memo);
}
if ("$" in outObj) {
return { $: "obj", val: outObj };
} }
return outObj; return outObj;
} }
if (typeof val === "bigint") { if (typeof val === "bigint") {
types.push([path, "bigint"]); return { $: "bigint", val: val.toString() };
return val.toString();
} }
if (typeof val === "boolean") { if (typeof val === "boolean") {
return val; return val;
@ -327,123 +324,103 @@ function internalEncapsulate(
throw Error(); throw Error();
} }
/** function derefPath(
* Encapsulate a cloneable value into a plain JSON object. root: any,
*/ p1: Array<string | number>,
export function structuredEncapsulate(val: any): any { n: number,
const outRoot = {}; p2: Array<string | number>,
const types: Array<[string[], string]> = []; ): any {
let res; let v = root;
res = internalEncapsulate(val, outRoot, [], new Map(), types); for (let i = 0; i < n; i++) {
if (res === null) { v = v[p1[i]];
return res;
} }
// We need to further encapsulate the outer layer for (let i = 0; i < p2.length; i++) {
if ( v = v[p2[i]];
Array.isArray(res) ||
typeof res !== "object" ||
"$" in res ||
"$types" in res
) {
res = { $: res };
} }
if (types.length > 0) { return v;
res["$types"] = types;
}
return res;
} }
export function applyLegacyTypeAnnotations(val: any): any { function internalReviveArray(sval: any, root: any, path: string[]): any {
if (val === null) { const newArr: any[] = [];
if (root === undefined) {
root = newArr;
}
for (let i = 0; i < sval.length; i++) {
const p = [...path, String(i)];
newArr.push(internalStructuredRevive(sval[i], root, p));
}
return newArr;
}
function internalReviveObject(sval: any, root: any, path: string[]): any {
const newObj = {} as any;
if (root === undefined) {
root = newObj;
}
for (const key of Object.keys(sval)) {
const p = [...path, key];
newObj[key] = internalStructuredRevive(sval[key], root, p);
}
return newObj;
}
function internalStructuredRevive(sval: any, root: any, path: string[]): any {
if (typeof sval === "string") {
return sval;
}
if (typeof sval === "number") {
return sval;
}
if (typeof sval === "boolean") {
return sval;
}
if (sval === null) {
return null; return null;
} }
if (typeof val === "number") { if (Array.isArray(sval)) {
return val; return internalReviveArray(sval, root, path);
} }
if (typeof val === "string") {
return val; if (isUserObject(sval) || isPlainObject(sval)) {
} if ("$" in sval) {
if (typeof val === "boolean") { const dollar = sval.$;
return val; switch (dollar) {
} case "undef":
if (!isPlainObject(val)) { return undefined;
throw Error(); case "bigint":
} return BigInt((sval as any).val);
let types = val.$types ?? []; case "date":
delete val.$types; return new Date((sval as any).val);
let outRoot: any; case "obj": {
if ("$" in val) { return internalReviveObject((sval as any).val, root, path);
outRoot = val.$; }
} else { case "array":
outRoot = val; return internalReviveArray((sval as any).val, root, path);
} case "ref": {
function mutatePath(path: string[], f: (x: any) => any): void { const level = (sval as any).l;
if (path.length == 0) { const p2 = (sval as any).p;
outRoot = f(outRoot); return derefPath(root, path, path.length - level, p2);
return; }
} default:
let obj = outRoot; throw Error();
for (let i = 0; i < path.length - 1; i++) {
const n = path[i];
if (!(n in obj)) {
obj[n] = {};
} }
obj = obj[n]; } else {
} return internalReviveObject(sval, root, path);
const last = path[path.length - 1];
obj[last] = f(obj[last]);
}
function lookupPath(path: string[]): any {
let obj = outRoot;
for (const n of path) {
obj = obj[n];
}
return obj;
}
for (const [path, type] of types) {
switch (type) {
case "bigint": {
mutatePath(path, (x) => BigInt(x));
break;
}
case "array": {
mutatePath(path, (x) => {
const newArr: any = [];
for (const k in x) {
newArr[k] = x[k];
}
return newArr;
});
break;
}
case "date": {
mutatePath(path, (x) => new Date(x));
break;
}
case "undef": {
mutatePath(path, (x) => undefined);
break;
}
case "ref": {
mutatePath(path, (x) => lookupPath(x));
break;
}
default:
throw Error(`type '${type}' not implemented`);
} }
} }
return outRoot;
throw Error();
} }
export function internalStructuredRevive(val: any): any { /**
// FIXME: Do the newly specified, compositional encoding here. * Encapsulate a cloneable value into a plain JSON value.
val = JSON.parse(JSON.stringify(val)); */
return val; export function structuredEncapsulate(val: any): any {
return internalEncapsulate(val, [], new Map());
} }
export function structuredRevive(val: any): any { export function structuredRevive(sval: any): any {
const r = internalStructuredRevive(val); return internalStructuredRevive(sval, undefined, []);
return applyLegacyTypeAnnotations(r);
} }
/** /**

View File

@ -17,7 +17,11 @@
import { IDBValidKey } from "../idbtypes.js"; import { IDBValidKey } from "../idbtypes.js";
import { DataError } from "./errors.js"; import { DataError } from "./errors.js";
// https://www.w3.org/TR/IndexedDB-2/#convert-a-value-to-a-key /**
* Algorithm to "convert a value to a key".
*
* https://www.w3.org/TR/IndexedDB/#convert-value-to-key
*/
export function valueToKey( export function valueToKey(
input: any, input: any,
seen?: Set<object>, seen?: Set<object>,

View File

@ -4,7 +4,7 @@
"lib": ["es6"], "lib": ["es6"],
"module": "ES2020", "module": "ES2020",
"moduleResolution": "Node16", "moduleResolution": "Node16",
"target": "ES6", "target": "ES2020",
"allowJs": true, "allowJs": true,
"noImplicitAny": true, "noImplicitAny": true,
"outDir": "lib", "outDir": "lib",

View File

@ -17,7 +17,7 @@ msgstr ""
"Project-Id-Version: Taler Wallet\n" "Project-Id-Version: Taler Wallet\n"
"Report-Msgid-Bugs-To: taler@gnu.org\n" "Report-Msgid-Bugs-To: taler@gnu.org\n"
"POT-Creation-Date: 2016-11-23 00:00+0100\n" "POT-Creation-Date: 2016-11-23 00:00+0100\n"
"PO-Revision-Date: 2023-08-15 07:28+0000\n" "PO-Revision-Date: 2023-08-16 12:43+0000\n"
"Last-Translator: Krystian Baran <kiszkot@murena.io>\n" "Last-Translator: Krystian Baran <kiszkot@murena.io>\n"
"Language-Team: Italian <https://weblate.taler.net/projects/gnu-taler/" "Language-Team: Italian <https://weblate.taler.net/projects/gnu-taler/"
"merchant-backoffice/it/>\n" "merchant-backoffice/it/>\n"
@ -1203,7 +1203,7 @@ msgstr ""
#: src/paths/instance/orders/details/DetailPage.tsx:767 #: src/paths/instance/orders/details/DetailPage.tsx:767
#, c-format #, c-format
msgid "Back" msgid "Back"
msgstr "" msgstr "Indietro"
#: src/paths/instance/orders/details/index.tsx:79 #: src/paths/instance/orders/details/index.tsx:79
#, c-format #, c-format
@ -2644,7 +2644,7 @@ msgstr ""
#: src/components/menu/SideBar.tsx:91 #: src/components/menu/SideBar.tsx:91
#, c-format #, c-format
msgid "Settings" msgid "Settings"
msgstr "" msgstr "Impostazioni"
#: src/components/menu/SideBar.tsx:167 #: src/components/menu/SideBar.tsx:167
#, c-format #, c-format

View File

@ -68,7 +68,6 @@ export async function runBench1(configJson: any): Promise<void> {
} else { } else {
logger.info("not trusting exchange (validating signatures)"); logger.info("not trusting exchange (validating signatures)");
} }
const batchWithdrawal = !!process.env["TALER_WALLET_BATCH_WITHDRAWAL"];
let wallet = {} as Wallet; let wallet = {} as Wallet;
let getDbStats: () => AccessStats; let getDbStats: () => AccessStats;
@ -91,9 +90,7 @@ export async function runBench1(configJson: any): Promise<void> {
testing: { testing: {
insecureTrustExchange: trustExchange, insecureTrustExchange: trustExchange,
}, },
features: { features: {},
batchWithdrawal,
},
}, },
}); });
wallet = res.wallet; wallet = res.wallet;

View File

@ -76,8 +76,6 @@ export async function runBench3(configJson: any): Promise<void> {
} else { } else {
logger.info("not trusting exchange (validating signatures)"); logger.info("not trusting exchange (validating signatures)");
} }
const batchWithdrawal = !!process.env["TALER_WALLET_BATCH_WITHDRAWAL"];
let wallet = {} as Wallet; let wallet = {} as Wallet;
let getDbStats: () => AccessStats; let getDbStats: () => AccessStats;
@ -96,9 +94,7 @@ export async function runBench3(configJson: any): Promise<void> {
persistentStoragePath: undefined, persistentStoragePath: undefined,
httpLib: myHttpLib, httpLib: myHttpLib,
config: { config: {
features: { features: {},
batchWithdrawal,
},
testing: { testing: {
insecureTrustExchange: trustExchange, insecureTrustExchange: trustExchange,
}, },

View File

@ -1,59 +0,0 @@
/*
This file is part of GNU Taler
(C) 2020 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
/**
* Imports.
*/
import { GlobalTestState, MerchantPrivateApi } from "../harness/harness.js";
import {
createSimpleTestkudosEnvironment,
withdrawViaBank,
} from "../harness/helpers.js";
/**
* Run test for basic, bank-integrated withdrawal.
*/
export async function runPromptPaymentScenario(t: GlobalTestState) {
// Set up test environment
const { wallet, bank, exchange, merchant } =
await createSimpleTestkudosEnvironment(t);
// Withdraw digital cash into the wallet.
await withdrawViaBank(t, { wallet, bank, exchange, amount: "TESTKUDOS:20" });
// Set up order.
const orderResp = await MerchantPrivateApi.createOrder(merchant, "default", {
order: {
summary: "Buy me!",
amount: "TESTKUDOS:5",
fulfillment_url: "taler://fulfillment-success/thx",
},
});
let orderStatus = await MerchantPrivateApi.queryPrivateOrderStatus(merchant, {
orderId: orderResp.order_id,
});
t.assertTrue(orderStatus.order_status === "unpaid");
console.log(orderStatus);
// Wait "forever"
await new Promise(() => {});
}

View File

@ -0,0 +1,59 @@
/*
This file is part of GNU Taler
(C) 2020 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
/**
* Imports.
*/
import { WalletApiOperation } from "@gnu-taler/taler-wallet-core";
import { GlobalTestState } from "../harness/harness.js";
import {
createSimpleTestkudosEnvironmentV2,
withdrawViaBankV2,
makeTestPaymentV2,
} from "../harness/helpers.js";
import { j2s } from "@gnu-taler/taler-util";
/**
* Run test for basic, bank-integrated withdrawal and payment.
*/
export async function runSimplePaymentTest(t: GlobalTestState) {
// Set up test environment
const { walletClient, bank, exchange, merchant } =
await createSimpleTestkudosEnvironmentV2(t);
// Withdraw digital cash into the wallet.
await withdrawViaBankV2(t, {
walletClient,
bank,
exchange,
amount: "TESTKUDOS:20",
});
await walletClient.call(WalletApiOperation.TestingWaitTransactionsFinal, {});
const order = {
summary: "Buy me!",
amount: "TESTKUDOS:5",
fulfillment_url: "taler://fulfillment-success/thx",
};
await makeTestPaymentV2(t, { walletClient, merchant, order });
await walletClient.call(WalletApiOperation.TestingWaitTransactionsFinal, {});
}
runSimplePaymentTest.suites = ["wallet"];

View File

@ -103,6 +103,7 @@ import { runPaymentTemplateTest } from "./test-payment-template.js";
import { runExchangeDepositTest } from "./test-exchange-deposit.js"; import { runExchangeDepositTest } from "./test-exchange-deposit.js";
import { runPeerRepairTest } from "./test-peer-repair.js"; import { runPeerRepairTest } from "./test-peer-repair.js";
import { runPaymentShareTest } from "./test-payment-share.js"; import { runPaymentShareTest } from "./test-payment-share.js";
import { runSimplePaymentTest } from "./test-simple-payment.js";
/** /**
* Test runner. * Test runner.
@ -129,6 +130,7 @@ const allTests: TestMainFunction[] = [
runClauseSchnorrTest, runClauseSchnorrTest,
runDenomUnofferedTest, runDenomUnofferedTest,
runDepositTest, runDepositTest,
runSimplePaymentTest,
runExchangeManagementTest, runExchangeManagementTest,
runExchangeTimetravelTest, runExchangeTimetravelTest,
runFeeRegressionTest, runFeeRegressionTest,

View File

@ -11,6 +11,7 @@
".": { ".": {
"node": "./lib/index.node.js", "node": "./lib/index.node.js",
"browser": "./lib/index.browser.js", "browser": "./lib/index.browser.js",
"qtart": "./lib/index.qtart.js",
"default": "./lib/index.js" "default": "./lib/index.js"
}, },
"./twrpc": { "./twrpc": {

View File

@ -0,0 +1,27 @@
/*
This file is part of GNU Taler
(C) 2021 Taler Systems S.A.
GNU Taler is free software; you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation; either version 3, or (at your option) any later version.
GNU Taler is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
GNU Taler; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
*/
import { setPRNG } from "./nacl-fast.js";
setPRNG(function (x: Uint8Array, n: number) {
// @ts-ignore
const va = globalThis._tart.randomBytes(n);
const v = new Uint8Array(va);
for (let i = 0; i < n; i++) x[i] = v[i];
for (let i = 0; i < v.length; i++) v[i] = 0;
});
export * from "./index.js";

View File

@ -48,6 +48,7 @@ import {
RefreshReason, RefreshReason,
TalerErrorDetail, TalerErrorDetail,
TransactionIdStr, TransactionIdStr,
TransactionStateFilter,
} from "./wallet-types.js"; } from "./wallet-types.js";
export interface TransactionsRequest { export interface TransactionsRequest {
@ -65,6 +66,8 @@ export interface TransactionsRequest {
* If true, include all refreshes in the transactions list. * If true, include all refreshes in the transactions list.
*/ */
includeRefreshes?: boolean; includeRefreshes?: boolean;
filterByState?: TransactionStateFilter
} }
export interface TransactionState { export interface TransactionState {

View File

@ -2644,3 +2644,10 @@ export const codecForValidateIbanResponse = (): Codec<ValidateIbanResponse> =>
buildCodecForObject<ValidateIbanResponse>() buildCodecForObject<ValidateIbanResponse>()
.property("valid", codecForBoolean()) .property("valid", codecForBoolean())
.build("ValidateIbanResponse"); .build("ValidateIbanResponse");
export type TransactionStateFilter = "nonfinal";
export interface TransactionRecordFilter {
onlyState?: TransactionStateFilter;
onlyCurrency?: string;
}

View File

@ -24,7 +24,9 @@ install_target = $(prefix)/lib/taler-wallet-cli
.PHONY: install install-nodeps deps .PHONY: install install-nodeps deps
install-nodeps: install-nodeps:
./build-node.mjs ./build-node.mjs
@echo installing wallet CLI to $(install_target)
install -d $(prefix)/bin install -d $(prefix)/bin
install -d $(install_target)/build
install -d $(install_target)/bin install -d $(install_target)/bin
install -d $(install_target)/node_modules/taler-wallet-cli install -d $(install_target)/node_modules/taler-wallet-cli
install -d $(install_target)/node_modules/taler-wallet-cli/bin install -d $(install_target)/node_modules/taler-wallet-cli/bin
@ -32,6 +34,8 @@ install-nodeps:
install ./dist/taler-wallet-cli-bundled.cjs $(install_target)/node_modules/taler-wallet-cli/dist/ install ./dist/taler-wallet-cli-bundled.cjs $(install_target)/node_modules/taler-wallet-cli/dist/
install ./dist/taler-wallet-cli-bundled.cjs.map $(install_target)/node_modules/taler-wallet-cli/dist/ install ./dist/taler-wallet-cli-bundled.cjs.map $(install_target)/node_modules/taler-wallet-cli/dist/
install ./bin/taler-wallet-cli.mjs $(install_target)/node_modules/taler-wallet-cli/bin/ install ./bin/taler-wallet-cli.mjs $(install_target)/node_modules/taler-wallet-cli/bin/
install ../idb-bridge/node_modules/better-sqlite3/build/Release/better_sqlite3.node $(install_target)/build/ \
|| echo "sqlite3 unavailable, better-sqlite3 native module not found"
ln -sf $(install_target)/node_modules/taler-wallet-cli/bin/taler-wallet-cli.mjs $(prefix)/bin/taler-wallet-cli ln -sf $(install_target)/node_modules/taler-wallet-cli/bin/taler-wallet-cli.mjs $(prefix)/bin/taler-wallet-cli
deps: deps:
pnpm install --frozen-lockfile --filter @gnu-taler/taler-wallet-cli... pnpm install --frozen-lockfile --filter @gnu-taler/taler-wallet-cli...

View File

@ -2,3 +2,8 @@
This package provides `taler-wallet-cli`, the command-line interface for the This package provides `taler-wallet-cli`, the command-line interface for the
GNU Taler wallet. GNU Taler wallet.
## sqlite3 backend
To be able to use the sqlite3 backend, make sure that better-sqlite3
is installed as an optional dependency in the ../idb-bridge package.

View File

@ -0,0 +1,8 @@
#!/usr/bin/env node
// Execute the wallet CLI from the source directory.
// This script is meant for testing and must not
// be installed.
import { main } from '../lib/index.js';
main();

View File

@ -59,7 +59,7 @@ export const buildConfig = {
conditions: ["qtart"], conditions: ["qtart"],
sourcemap: true, sourcemap: true,
// quickjs standard library // quickjs standard library
external: ["std", "os"], external: ["std", "os", "better-sqlite3"],
define: { define: {
__VERSION__: `"${_package.version}"`, __VERSION__: `"${_package.version}"`,
__GIT_HASH__: `"${GIT_HASH}"`, __GIT_HASH__: `"${GIT_HASH}"`,

View File

@ -258,7 +258,6 @@ async function createLocalWallet(
cryptoWorkerType: walletCliArgs.wallet.cryptoWorker as any, cryptoWorkerType: walletCliArgs.wallet.cryptoWorker as any,
config: { config: {
features: { features: {
batchWithdrawal: checkEnvFlag("TALER_WALLET_BATCH_WITHDRAWAL"),
}, },
testing: { testing: {
devModeActive: checkEnvFlag("TALER_WALLET_DEV_MODE"), devModeActive: checkEnvFlag("TALER_WALLET_DEV_MODE"),

View File

@ -119,7 +119,7 @@ export const CURRENT_DB_CONFIG_KEY = "currentMainDbName";
* backwards-compatible way or object stores and indices * backwards-compatible way or object stores and indices
* are added. * are added.
*/ */
export const WALLET_DB_MINOR_VERSION = 9; export const WALLET_DB_MINOR_VERSION = 10;
/** /**
* Ranges for operation status fields. * Ranges for operation status fields.
@ -2687,6 +2687,9 @@ export const WalletStoresV1 = {
}), }),
{ {
byProposalId: describeIndex("byProposalId", "proposalId"), byProposalId: describeIndex("byProposalId", "proposalId"),
byStatus: describeIndex("byStatus", "status", {
versionAdded: 10,
}),
}, },
), ),
refundItems: describeStore( refundItems: describeStore(

View File

@ -16,7 +16,7 @@
import { WalletNotification } from "@gnu-taler/taler-util"; import { WalletNotification } from "@gnu-taler/taler-util";
import { HttpRequestLibrary } from "@gnu-taler/taler-util/http"; import { HttpRequestLibrary } from "@gnu-taler/taler-util/http";
import { WalletConfig, WalletConfigParameter } from "./index.js"; import { WalletConfigParameter } from "./index.js";
/** /**
* Helpers to initiate a wallet in a host environment. * Helpers to initiate a wallet in a host environment.

View File

@ -27,6 +27,7 @@ import type { IDBFactory } from "@gnu-taler/idb-bridge";
import { import {
BridgeIDBFactory, BridgeIDBFactory,
MemoryBackend, MemoryBackend,
createSqliteBackend,
shimIndexedDB, shimIndexedDB,
} from "@gnu-taler/idb-bridge"; } from "@gnu-taler/idb-bridge";
import { AccessStats } from "@gnu-taler/idb-bridge"; import { AccessStats } from "@gnu-taler/idb-bridge";
@ -39,24 +40,21 @@ import { createPlatformHttpLib } from "@gnu-taler/taler-util/http";
import { SetTimeoutTimerAPI } from "./util/timer.js"; import { SetTimeoutTimerAPI } from "./util/timer.js";
import { Wallet } from "./wallet.js"; import { Wallet } from "./wallet.js";
import { DefaultNodeWalletArgs, makeTempfileId } from "./host-common.js"; import { DefaultNodeWalletArgs, makeTempfileId } from "./host-common.js";
import { createNodeSqlite3Impl } from "@gnu-taler/idb-bridge/node-sqlite3-bindings";
const logger = new Logger("host-impl.node.ts"); const logger = new Logger("host-impl.node.ts");
/** interface MakeDbResult {
* Get a wallet instance with default settings for node. idbFactory: BridgeIDBFactory;
* getStats: () => AccessStats;
* Extended version that allows getting DB stats. }
*/
export async function createNativeWalletHost2( async function makeFileDb(
args: DefaultNodeWalletArgs = {}, args: DefaultNodeWalletArgs = {},
): Promise<{ ): Promise<MakeDbResult> {
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
BridgeIDBFactory.enableTracing = false; BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend(); const myBackend = new MemoryBackend();
myBackend.enableTracing = false; myBackend.enableTracing = false;
const storagePath = args.persistentStoragePath; const storagePath = args.persistentStoragePath;
if (storagePath) { if (storagePath) {
try { try {
@ -96,8 +94,41 @@ export async function createNativeWalletHost2(
BridgeIDBFactory.enableTracing = false; BridgeIDBFactory.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend); const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory; return {
idbFactory: myBridgeIdbFactory,
getStats: () => myBackend.accessStats,
};
}
async function makeSqliteDb(
args: DefaultNodeWalletArgs,
): Promise<MakeDbResult> {
BridgeIDBFactory.enableTracing = false;
const imp = await createNodeSqlite3Impl();
const myBackend = await createSqliteBackend(imp, {
filename: args.persistentStoragePath ?? ":memory:",
});
myBackend.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
return {
getStats() {
throw Error("not implemented");
},
idbFactory: myBridgeIdbFactory,
};
}
/**
* Get a wallet instance with default settings for node.
*
* Extended version that allows getting DB stats.
*/
export async function createNativeWalletHost2(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
let myHttpLib; let myHttpLib;
if (args.httpLib) { if (args.httpLib) {
myHttpLib = args.httpLib; myHttpLib = args.httpLib;
@ -115,7 +146,19 @@ export async function createNativeWalletHost2(
); );
}; };
shimIndexedDB(myBridgeIdbFactory); let dbResp: MakeDbResult;
if (args.persistentStoragePath &&args.persistentStoragePath.endsWith(".json")) {
logger.info("using legacy file-based DB backend");
dbResp = await makeFileDb(args);
} else {
logger.info("using sqlite3 DB backend");
dbResp = await makeSqliteDb(args);
}
const myIdbFactory: IDBFactory = dbResp.idbFactory as any as IDBFactory;
shimIndexedDB(dbResp.idbFactory);
const myDb = await openTalerDatabase(myIdbFactory, myVersionChange); const myDb = await openTalerDatabase(myIdbFactory, myVersionChange);
@ -158,6 +201,6 @@ export async function createNativeWalletHost2(
} }
return { return {
wallet: w, wallet: w,
getDbStats: () => myBackend.accessStats, getDbStats: dbResp.getStats,
}; };
} }

View File

@ -22,11 +22,17 @@
/** /**
* Imports. * Imports.
*/ */
import type { IDBFactory } from "@gnu-taler/idb-bridge"; import type {
IDBFactory,
ResultRow,
Sqlite3Interface,
Sqlite3Statement,
} from "@gnu-taler/idb-bridge";
// eslint-disable-next-line no-duplicate-imports // eslint-disable-next-line no-duplicate-imports
import { import {
BridgeIDBFactory, BridgeIDBFactory,
MemoryBackend, MemoryBackend,
createSqliteBackend,
shimIndexedDB, shimIndexedDB,
} from "@gnu-taler/idb-bridge"; } from "@gnu-taler/idb-bridge";
import { AccessStats } from "@gnu-taler/idb-bridge"; import { AccessStats } from "@gnu-taler/idb-bridge";
@ -41,12 +47,78 @@ import { DefaultNodeWalletArgs, makeTempfileId } from "./host-common.js";
const logger = new Logger("host-impl.qtart.ts"); const logger = new Logger("host-impl.qtart.ts");
export async function createNativeWalletHost2( interface MakeDbResult {
idbFactory: BridgeIDBFactory;
getStats: () => AccessStats;
}
let numStmt = 0;
export async function createQtartSqlite3Impl(): Promise<Sqlite3Interface> {
const tart: any = (globalThis as any)._tart;
if (!tart) {
throw Error("globalThis._qtart not defined");
}
return {
open(filename: string) {
const internalDbHandle = tart.sqlite3Open(filename);
return {
internalDbHandle,
close() {
tart.sqlite3Close(internalDbHandle);
},
prepare(stmtStr): Sqlite3Statement {
const stmtHandle = tart.sqlite3Prepare(internalDbHandle, stmtStr);
return {
internalStatement: stmtHandle,
getAll(params): ResultRow[] {
numStmt++;
return tart.sqlite3StmtGetAll(stmtHandle, params);
},
getFirst(params): ResultRow | undefined {
numStmt++;
return tart.sqlite3StmtGetFirst(stmtHandle, params);
},
run(params) {
numStmt++;
return tart.sqlite3StmtRun(stmtHandle, params);
},
};
},
exec(sqlStr): void {
numStmt++;
tart.sqlite3Exec(internalDbHandle, sqlStr);
},
};
},
};
}
async function makeSqliteDb(
args: DefaultNodeWalletArgs,
): Promise<MakeDbResult> {
BridgeIDBFactory.enableTracing = false;
const imp = await createQtartSqlite3Impl();
const myBackend = await createSqliteBackend(imp, {
filename: args.persistentStoragePath ?? ":memory:",
});
myBackend.trackStats = true;
myBackend.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
return {
getStats() {
return {
...myBackend.accessStats,
primitiveStatements: numStmt,
}
},
idbFactory: myBridgeIdbFactory,
};
}
async function makeFileDb(
args: DefaultNodeWalletArgs = {}, args: DefaultNodeWalletArgs = {},
): Promise<{ ): Promise<MakeDbResult> {
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
BridgeIDBFactory.enableTracing = false; BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend(); const myBackend = new MemoryBackend();
myBackend.enableTracing = false; myBackend.enableTracing = false;
@ -78,12 +150,34 @@ export async function createNativeWalletHost2(
}; };
} }
logger.info("done processing storage path"); const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
return {
idbFactory: myBridgeIdbFactory,
getStats: () => myBackend.accessStats,
};
}
export async function createNativeWalletHost2(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
BridgeIDBFactory.enableTracing = false; BridgeIDBFactory.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend); let dbResp: MakeDbResult;
const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory;
if (args.persistentStoragePath && args.persistentStoragePath.endsWith(".json")) {
logger.info("using JSON file DB backend (slow!)");
dbResp = await makeFileDb(args);
} else {
logger.info("using sqlite3 DB backend (experimental!)");
dbResp = await makeSqliteDb(args)
}
const myIdbFactory: IDBFactory = dbResp.idbFactory as any as IDBFactory;
shimIndexedDB(dbResp.idbFactory);
let myHttpLib; let myHttpLib;
if (args.httpLib) { if (args.httpLib) {
@ -102,8 +196,6 @@ export async function createNativeWalletHost2(
); );
}; };
shimIndexedDB(myBridgeIdbFactory);
const myDb = await openTalerDatabase(myIdbFactory, myVersionChange); const myDb = await openTalerDatabase(myIdbFactory, myVersionChange);
let workerFactory; let workerFactory;
@ -124,6 +216,6 @@ export async function createNativeWalletHost2(
} }
return { return {
wallet: w, wallet: w,
getDbStats: () => myBackend.accessStats, getDbStats: dbResp.getStats,
}; };
} }

View File

@ -16,7 +16,6 @@
import { DefaultNodeWalletArgs } from "./host-common.js"; import { DefaultNodeWalletArgs } from "./host-common.js";
import { Wallet } from "./index.js"; import { Wallet } from "./index.js";
import * as hostImpl from "#host-impl"; import * as hostImpl from "#host-impl";
import { AccessStats } from "@gnu-taler/idb-bridge"; import { AccessStats } from "@gnu-taler/idb-bridge";

View File

@ -420,10 +420,10 @@ async function handlePeerPullCreditCreatePurse(
pullIni.exchangeBaseUrl, pullIni.exchangeBaseUrl,
); );
const httpResp = await ws.http.postJson( const httpResp = await ws.http.fetch(reservePurseMergeUrl.href, {
reservePurseMergeUrl.href, method: "POST",
reservePurseReqBody, body: reservePurseReqBody,
); });
if (httpResp.status === HttpStatusCode.UnavailableForLegalReasons) { if (httpResp.status === HttpStatusCode.UnavailableForLegalReasons) {
const respJson = await httpResp.json(); const respJson = await httpResp.json();
@ -759,23 +759,23 @@ export async function initiatePeerPullPayment(
.mktx((x) => [x.peerPullPaymentInitiations, x.contractTerms]) .mktx((x) => [x.peerPullPaymentInitiations, x.contractTerms])
.runReadWrite(async (tx) => { .runReadWrite(async (tx) => {
const ppi: PeerPullPaymentInitiationRecord = { const ppi: PeerPullPaymentInitiationRecord = {
amount: req.partialContractTerms.amount, amount: req.partialContractTerms.amount,
contractTermsHash: hContractTerms, contractTermsHash: hContractTerms,
exchangeBaseUrl: exchangeBaseUrl, exchangeBaseUrl: exchangeBaseUrl,
pursePriv: pursePair.priv, pursePriv: pursePair.priv,
pursePub: pursePair.pub, pursePub: pursePair.pub,
mergePriv: mergePair.priv, mergePriv: mergePair.priv,
mergePub: mergePair.pub, mergePub: mergePair.pub,
status: PeerPullPaymentInitiationStatus.PendingCreatePurse, status: PeerPullPaymentInitiationStatus.PendingCreatePurse,
contractTerms: contractTerms, contractTerms: contractTerms,
mergeTimestamp, mergeTimestamp,
contractEncNonce, contractEncNonce,
mergeReserveRowId: mergeReserveRowId, mergeReserveRowId: mergeReserveRowId,
contractPriv: contractKeyPair.priv, contractPriv: contractKeyPair.priv,
contractPub: contractKeyPair.pub, contractPub: contractKeyPair.pub,
withdrawalGroupId, withdrawalGroupId,
estimatedAmountEffective: wi.withdrawalAmountEffective, estimatedAmountEffective: wi.withdrawalAmountEffective,
} };
await tx.peerPullPaymentInitiations.put(ppi); await tx.peerPullPaymentInitiations.put(ppi);
const oldTxState: TransactionState = { const oldTxState: TransactionState = {
major: TransactionMajorState.None, major: TransactionMajorState.None,

View File

@ -34,13 +34,24 @@ import {
WithdrawalGroupStatus, WithdrawalGroupStatus,
RewardRecordStatus, RewardRecordStatus,
DepositOperationStatus, DepositOperationStatus,
RefreshGroupRecord,
WithdrawalGroupRecord,
DepositGroupRecord,
RewardRecord,
PurchaseRecord,
PeerPullPaymentInitiationRecord,
PeerPullPaymentIncomingRecord,
PeerPushPaymentInitiationRecord,
PeerPushPaymentIncomingRecord,
RefundGroupRecord,
RefundGroupStatus,
} from "../db.js"; } from "../db.js";
import { import {
PendingOperationsResponse, PendingOperationsResponse,
PendingTaskType, PendingTaskType,
TaskId, TaskId,
} from "../pending-types.js"; } from "../pending-types.js";
import { AbsoluteTime } from "@gnu-taler/taler-util"; import { AbsoluteTime, TransactionRecordFilter } from "@gnu-taler/taler-util";
import { InternalWalletState } from "../internal-wallet-state.js"; import { InternalWalletState } from "../internal-wallet-state.js";
import { GetReadOnlyAccess } from "../util/query.js"; import { GetReadOnlyAccess } from "../util/query.js";
import { GlobalIDB } from "@gnu-taler/idb-bridge"; import { GlobalIDB } from "@gnu-taler/idb-bridge";
@ -105,6 +116,32 @@ async function gatherExchangePending(
}); });
} }
/**
* Iterate refresh records based on a filter.
*/
export async function iterRecordsForRefresh(
tx: GetReadOnlyAccess<{
refreshGroups: typeof WalletStoresV1.refreshGroups;
}>,
filter: TransactionRecordFilter,
f: (r: RefreshGroupRecord) => Promise<void>,
): Promise<void> {
let refreshGroups: RefreshGroupRecord[];
if (filter.onlyState === "nonfinal") {
const keyRange = GlobalIDB.KeyRange.bound(
OperationStatusRange.ACTIVE_START,
OperationStatusRange.ACTIVE_END,
);
refreshGroups = await tx.refreshGroups.indexes.byStatus.getAll(keyRange);
} else {
refreshGroups = await tx.refreshGroups.indexes.byStatus.getAll();
}
for (const r of refreshGroups) {
await f(r);
}
}
async function gatherRefreshPending( async function gatherRefreshPending(
ws: InternalWalletState, ws: InternalWalletState,
tx: GetReadOnlyAccess<{ tx: GetReadOnlyAccess<{
@ -114,22 +151,13 @@ async function gatherRefreshPending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound( await iterRecordsForRefresh(tx, { onlyState: "nonfinal" }, async (r) => {
OperationStatusRange.ACTIVE_START,
OperationStatusRange.ACTIVE_END,
);
const refreshGroups = await tx.refreshGroups.indexes.byStatus.getAll(
keyRange,
);
for (const r of refreshGroups) {
if (r.timestampFinished) { if (r.timestampFinished) {
return; return;
} }
const opId = TaskIdentifiers.forRefresh(r); const opId = TaskIdentifiers.forRefresh(r);
const retryRecord = await tx.operationRetries.get(opId); const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now(); const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
resp.pendingOperations.push({ resp.pendingOperations.push({
type: PendingTaskType.Refresh, type: PendingTaskType.Refresh,
...getPendingCommon(ws, opId, timestampDue), ...getPendingCommon(ws, opId, timestampDue),
@ -140,6 +168,30 @@ async function gatherRefreshPending(
), ),
retryInfo: retryRecord?.retryInfo, retryInfo: retryRecord?.retryInfo,
}); });
});
}
export async function iterRecordsForWithdrawal(
tx: GetReadOnlyAccess<{
withdrawalGroups: typeof WalletStoresV1.withdrawalGroups;
}>,
filter: TransactionRecordFilter,
f: (r: WithdrawalGroupRecord) => Promise<void>,
): Promise<void> {
let withdrawalGroupRecords: WithdrawalGroupRecord[];
if (filter.onlyState === "nonfinal") {
const range = GlobalIDB.KeyRange.bound(
WithdrawalGroupStatus.PendingRegisteringBank,
WithdrawalGroupStatus.PendingAml,
);
withdrawalGroupRecords =
await tx.withdrawalGroups.indexes.byStatus.getAll(range);
} else {
withdrawalGroupRecords =
await tx.withdrawalGroups.indexes.byStatus.getAll();
}
for (const wgr of withdrawalGroupRecords) {
await f(wgr);
} }
} }
@ -153,12 +205,7 @@ async function gatherWithdrawalPending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const range = GlobalIDB.KeyRange.bound( await iterRecordsForWithdrawal(tx, { onlyState: "nonfinal" }, async (wsr) => {
WithdrawalGroupStatus.PendingRegisteringBank,
WithdrawalGroupStatus.PendingAml,
);
const wsrs = await tx.withdrawalGroups.indexes.byStatus.getAll(range);
for (const wsr of wsrs) {
const opTag = TaskIdentifiers.forWithdrawal(wsr); const opTag = TaskIdentifiers.forWithdrawal(wsr);
let opr = await tx.operationRetries.get(opTag); let opr = await tx.operationRetries.get(opTag);
const now = AbsoluteTime.now(); const now = AbsoluteTime.now();
@ -184,6 +231,30 @@ async function gatherWithdrawalPending(
lastError: opr.lastError, lastError: opr.lastError,
retryInfo: opr.retryInfo, retryInfo: opr.retryInfo,
}); });
});
}
export async function iterRecordsForDeposit(
tx: GetReadOnlyAccess<{
depositGroups: typeof WalletStoresV1.depositGroups;
}>,
filter: TransactionRecordFilter,
f: (r: DepositGroupRecord) => Promise<void>,
): Promise<void> {
let dgs: DepositGroupRecord[];
if (filter.onlyState === "nonfinal") {
dgs = await tx.depositGroups.indexes.byStatus.getAll(
GlobalIDB.KeyRange.bound(
DepositOperationStatus.PendingDeposit,
DepositOperationStatus.PendingKyc,
),
);
} else {
dgs = await tx.depositGroups.indexes.byStatus.getAll();
}
for (const dg of dgs) {
await f(dg);
} }
} }
@ -196,16 +267,7 @@ async function gatherDepositPending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const dgs = await tx.depositGroups.indexes.byStatus.getAll( await iterRecordsForDeposit(tx, { onlyState: "nonfinal" }, async (dg) => {
GlobalIDB.KeyRange.bound(
DepositOperationStatus.PendingDeposit,
DepositOperationStatus.PendingKyc,
),
);
for (const dg of dgs) {
if (dg.timestampFinished) {
return;
}
let deposited = true; let deposited = true;
for (const d of dg.depositedPerCoin) { for (const d of dg.depositedPerCoin) {
if (!d) { if (!d) {
@ -226,10 +288,28 @@ async function gatherDepositPending(
lastError: retryRecord?.lastError, lastError: retryRecord?.lastError,
retryInfo: retryRecord?.retryInfo, retryInfo: retryRecord?.retryInfo,
}); });
});
}
export async function iterRecordsForReward(
tx: GetReadOnlyAccess<{
rewards: typeof WalletStoresV1.rewards;
}>,
filter: TransactionRecordFilter,
f: (r: RewardRecord) => Promise<void>,
): Promise<void> {
if (filter.onlyState === "nonfinal") {
const range = GlobalIDB.KeyRange.bound(
RewardRecordStatus.PendingPickup,
RewardRecordStatus.PendingPickup,
);
await tx.rewards.indexes.byStatus.iter(range).forEachAsync(f);
} else {
await tx.rewards.indexes.byStatus.iter().forEachAsync(f);
} }
} }
async function gatherTipPending( async function gatherRewardPending(
ws: InternalWalletState, ws: InternalWalletState,
tx: GetReadOnlyAccess<{ tx: GetReadOnlyAccess<{
rewards: typeof WalletStoresV1.rewards; rewards: typeof WalletStoresV1.rewards;
@ -238,15 +318,7 @@ async function gatherTipPending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const range = GlobalIDB.KeyRange.bound( await iterRecordsForReward(tx, { onlyState: "nonfinal" }, async (tip) => {
RewardRecordStatus.PendingPickup,
RewardRecordStatus.PendingPickup,
);
await tx.rewards.indexes.byStatus.iter(range).forEachAsync(async (tip) => {
// FIXME: The tip record needs a proper status field!
if (tip.pickedUpTimestamp) {
return;
}
const opId = TaskIdentifiers.forTipPickup(tip); const opId = TaskIdentifiers.forTipPickup(tip);
const retryRecord = await tx.operationRetries.get(opId); const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now(); const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
@ -264,6 +336,43 @@ async function gatherTipPending(
}); });
} }
export async function iterRecordsForRefund(
tx: GetReadOnlyAccess<{
refundGroups: typeof WalletStoresV1.refundGroups;
}>,
filter: TransactionRecordFilter,
f: (r: RefundGroupRecord) => Promise<void>,
): Promise<void> {
if (filter.onlyState === "nonfinal") {
const keyRange = GlobalIDB.KeyRange.only(
RefundGroupStatus.Pending
);
await tx.refundGroups.indexes.byStatus
.iter(keyRange)
.forEachAsync(f);
} else {
await tx.refundGroups.iter().forEachAsync(f);
}
}
export async function iterRecordsForPurchase(
tx: GetReadOnlyAccess<{
purchases: typeof WalletStoresV1.purchases;
}>,
filter: TransactionRecordFilter,
f: (r: PurchaseRecord) => Promise<void>,
): Promise<void> {
if (filter.onlyState === "nonfinal") {
const keyRange = GlobalIDB.KeyRange.bound(
PurchaseStatus.PendingDownloadingProposal,
PurchaseStatus.PendingAcceptRefund,
);
await tx.purchases.indexes.byStatus.iter(keyRange).forEachAsync(f);
} else {
await tx.purchases.indexes.byStatus.iter().forEachAsync(f);
}
}
async function gatherPurchasePending( async function gatherPurchasePending(
ws: InternalWalletState, ws: InternalWalletState,
tx: GetReadOnlyAccess<{ tx: GetReadOnlyAccess<{
@ -273,27 +382,20 @@ async function gatherPurchasePending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound( await iterRecordsForPurchase(tx, { onlyState: "nonfinal" }, async (pr) => {
PurchaseStatus.PendingDownloadingProposal, const opId = TaskIdentifiers.forPay(pr);
PurchaseStatus.PendingAcceptRefund, const retryRecord = await tx.operationRetries.get(opId);
); const timestampDue = retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now();
await tx.purchases.indexes.byStatus resp.pendingOperations.push({
.iter(keyRange) type: PendingTaskType.Purchase,
.forEachAsync(async (pr) => { ...getPendingCommon(ws, opId, timestampDue),
const opId = TaskIdentifiers.forPay(pr); givesLifeness: true,
const retryRecord = await tx.operationRetries.get(opId); statusStr: PurchaseStatus[pr.purchaseStatus],
const timestampDue = proposalId: pr.proposalId,
retryRecord?.retryInfo.nextRetry ?? AbsoluteTime.now(); retryInfo: retryRecord?.retryInfo,
resp.pendingOperations.push({ lastError: retryRecord?.lastError,
type: PendingTaskType.Purchase,
...getPendingCommon(ws, opId, timestampDue),
givesLifeness: true,
statusStr: PurchaseStatus[pr.purchaseStatus],
proposalId: pr.proposalId,
retryInfo: retryRecord?.retryInfo,
lastError: retryRecord?.lastError,
});
}); });
});
} }
async function gatherRecoupPending( async function gatherRecoupPending(
@ -362,6 +464,26 @@ async function gatherBackupPending(
}); });
} }
export async function iterRecordsForPeerPullInitiation(
tx: GetReadOnlyAccess<{
peerPullPaymentInitiations: typeof WalletStoresV1.peerPullPaymentInitiations;
}>,
filter: TransactionRecordFilter,
f: (r: PeerPullPaymentInitiationRecord) => Promise<void>,
): Promise<void> {
if (filter.onlyState === "nonfinal") {
const keyRange = GlobalIDB.KeyRange.bound(
PeerPullPaymentInitiationStatus.PendingCreatePurse,
PeerPullPaymentInitiationStatus.AbortingDeletePurse,
);
await tx.peerPullPaymentInitiations.indexes.byStatus
.iter(keyRange)
.forEachAsync(f);
} else {
await tx.peerPullPaymentInitiations.indexes.byStatus.iter().forEachAsync(f);
}
}
async function gatherPeerPullInitiationPending( async function gatherPeerPullInitiationPending(
ws: InternalWalletState, ws: InternalWalletState,
tx: GetReadOnlyAccess<{ tx: GetReadOnlyAccess<{
@ -371,13 +493,10 @@ async function gatherPeerPullInitiationPending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound( await iterRecordsForPeerPullInitiation(
PeerPullPaymentInitiationStatus.PendingCreatePurse, tx,
PeerPullPaymentInitiationStatus.AbortingDeletePurse, { onlyState: "nonfinal" },
); async (pi) => {
await tx.peerPullPaymentInitiations.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pi) => {
const opId = TaskIdentifiers.forPeerPullPaymentInitiation(pi); const opId = TaskIdentifiers.forPeerPullPaymentInitiation(pi);
const retryRecord = await tx.operationRetries.get(opId); const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = const timestampDue =
@ -389,7 +508,28 @@ async function gatherPeerPullInitiationPending(
retryInfo: retryRecord?.retryInfo, retryInfo: retryRecord?.retryInfo,
pursePub: pi.pursePub, pursePub: pi.pursePub,
}); });
}); },
);
}
export async function iterRecordsForPeerPullDebit(
tx: GetReadOnlyAccess<{
peerPullPaymentIncoming: typeof WalletStoresV1.peerPullPaymentIncoming;
}>,
filter: TransactionRecordFilter,
f: (r: PeerPullPaymentIncomingRecord) => Promise<void>,
): Promise<void> {
if (filter.onlyState === "nonfinal") {
const keyRange = GlobalIDB.KeyRange.bound(
PeerPullDebitRecordStatus.PendingDeposit,
PeerPullDebitRecordStatus.AbortingRefresh,
);
await tx.peerPullPaymentIncoming.indexes.byStatus
.iter(keyRange)
.forEachAsync(f);
} else {
await tx.peerPullPaymentIncoming.indexes.byStatus.iter().forEachAsync(f);
}
} }
async function gatherPeerPullDebitPending( async function gatherPeerPullDebitPending(
@ -401,13 +541,10 @@ async function gatherPeerPullDebitPending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound( await iterRecordsForPeerPullDebit(
PeerPullDebitRecordStatus.PendingDeposit, tx,
PeerPullDebitRecordStatus.AbortingRefresh, { onlyState: "nonfinal" },
); async (pi) => {
await tx.peerPullPaymentIncoming.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pi) => {
const opId = TaskIdentifiers.forPeerPullPaymentDebit(pi); const opId = TaskIdentifiers.forPeerPullPaymentDebit(pi);
const retryRecord = await tx.operationRetries.get(opId); const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = const timestampDue =
@ -419,7 +556,28 @@ async function gatherPeerPullDebitPending(
retryInfo: retryRecord?.retryInfo, retryInfo: retryRecord?.retryInfo,
peerPullPaymentIncomingId: pi.peerPullPaymentIncomingId, peerPullPaymentIncomingId: pi.peerPullPaymentIncomingId,
}); });
}); },
);
}
export async function iterRecordsForPeerPushInitiation(
tx: GetReadOnlyAccess<{
peerPushPaymentInitiations: typeof WalletStoresV1.peerPushPaymentInitiations;
}>,
filter: TransactionRecordFilter,
f: (r: PeerPushPaymentInitiationRecord) => Promise<void>,
): Promise<void> {
if (filter.onlyState === "nonfinal") {
const keyRange = GlobalIDB.KeyRange.bound(
PeerPushPaymentInitiationStatus.PendingCreatePurse,
PeerPushPaymentInitiationStatus.AbortingRefresh,
);
await tx.peerPushPaymentInitiations.indexes.byStatus
.iter(keyRange)
.forEachAsync(f);
} else {
await tx.peerPushPaymentInitiations.indexes.byStatus.iter().forEachAsync(f);
}
} }
async function gatherPeerPushInitiationPending( async function gatherPeerPushInitiationPending(
@ -431,13 +589,10 @@ async function gatherPeerPushInitiationPending(
now: AbsoluteTime, now: AbsoluteTime,
resp: PendingOperationsResponse, resp: PendingOperationsResponse,
): Promise<void> { ): Promise<void> {
const keyRange = GlobalIDB.KeyRange.bound( await iterRecordsForPeerPushInitiation(
PeerPushPaymentInitiationStatus.PendingCreatePurse, tx,
PeerPushPaymentInitiationStatus.AbortingRefresh, { onlyState: "nonfinal" },
); async (pi) => {
await tx.peerPushPaymentInitiations.indexes.byStatus
.iter(keyRange)
.forEachAsync(async (pi) => {
const opId = TaskIdentifiers.forPeerPushPaymentInitiation(pi); const opId = TaskIdentifiers.forPeerPushPaymentInitiation(pi);
const retryRecord = await tx.operationRetries.get(opId); const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = const timestampDue =
@ -449,7 +604,28 @@ async function gatherPeerPushInitiationPending(
retryInfo: retryRecord?.retryInfo, retryInfo: retryRecord?.retryInfo,
pursePub: pi.pursePub, pursePub: pi.pursePub,
}); });
}); },
);
}
export async function iterRecordsForPeerPushCredit(
tx: GetReadOnlyAccess<{
peerPushPaymentIncoming: typeof WalletStoresV1.peerPushPaymentIncoming;
}>,
filter: TransactionRecordFilter,
f: (r: PeerPushPaymentIncomingRecord) => Promise<void>,
): Promise<void> {
if (filter.onlyState === "nonfinal") {
const keyRange = GlobalIDB.KeyRange.bound(
PeerPushPaymentIncomingStatus.PendingMerge,
PeerPushPaymentIncomingStatus.PendingWithdrawing,
);
await tx.peerPushPaymentIncoming.indexes.byStatus
.iter(keyRange)
.forEachAsync(f);
} else {
await tx.peerPushPaymentIncoming.indexes.byStatus.iter().forEachAsync(f);
}
} }
async function gatherPeerPushCreditPending( async function gatherPeerPushCreditPending(
@ -465,9 +641,10 @@ async function gatherPeerPushCreditPending(
PeerPushPaymentIncomingStatus.PendingMerge, PeerPushPaymentIncomingStatus.PendingMerge,
PeerPushPaymentIncomingStatus.PendingWithdrawing, PeerPushPaymentIncomingStatus.PendingWithdrawing,
); );
await tx.peerPushPaymentIncoming.indexes.byStatus await iterRecordsForPeerPushCredit(
.iter(keyRange) tx,
.forEachAsync(async (pi) => { { onlyState: "nonfinal" },
async (pi) => {
const opId = TaskIdentifiers.forPeerPushCredit(pi); const opId = TaskIdentifiers.forPeerPushCredit(pi);
const retryRecord = await tx.operationRetries.get(opId); const retryRecord = await tx.operationRetries.get(opId);
const timestampDue = const timestampDue =
@ -479,7 +656,8 @@ async function gatherPeerPushCreditPending(
retryInfo: retryRecord?.retryInfo, retryInfo: retryRecord?.retryInfo,
peerPushPaymentIncomingId: pi.peerPushPaymentIncomingId, peerPushPaymentIncomingId: pi.peerPushPaymentIncomingId,
}); });
}); },
);
} }
export async function getPendingOperations( export async function getPendingOperations(
@ -513,7 +691,7 @@ export async function getPendingOperations(
await gatherRefreshPending(ws, tx, now, resp); await gatherRefreshPending(ws, tx, now, resp);
await gatherWithdrawalPending(ws, tx, now, resp); await gatherWithdrawalPending(ws, tx, now, resp);
await gatherDepositPending(ws, tx, now, resp); await gatherDepositPending(ws, tx, now, resp);
await gatherTipPending(ws, tx, now, resp); await gatherRewardPending(ws, tx, now, resp);
await gatherPurchasePending(ws, tx, now, resp); await gatherPurchasePending(ws, tx, now, resp);
await gatherRecoupPending(ws, tx, now, resp); await gatherRecoupPending(ws, tx, now, resp);
await gatherBackupPending(ws, tx, now, resp); await gatherBackupPending(ws, tx, now, resp);

View File

@ -472,12 +472,15 @@ export async function waitUntilDone(ws: InternalWalletState): Promise<void> {
p = openPromise(); p = openPromise();
const txs = await getTransactions(ws, { const txs = await getTransactions(ws, {
includeRefreshes: true, includeRefreshes: true,
filterByState: "nonfinal",
}); });
let finished = true; let finished = true;
for (const tx of txs.transactions) { for (const tx of txs.transactions) {
switch (tx.txState.major) { switch (tx.txState.major) {
case TransactionMajorState.Pending: case TransactionMajorState.Pending:
case TransactionMajorState.Aborting: case TransactionMajorState.Aborting:
case TransactionMajorState.Suspended:
case TransactionMajorState.SuspendedAborting:
finished = false; finished = false;
logger.info( logger.info(
`continuing waiting, ${tx.transactionId} in ${tx.txState.major}(${tx.txState.minor})`, `continuing waiting, ${tx.transactionId} in ${tx.txState.major}(${tx.txState.minor})`,

View File

@ -36,6 +36,7 @@ import {
TransactionByIdRequest, TransactionByIdRequest,
TransactionIdStr, TransactionIdStr,
TransactionMajorState, TransactionMajorState,
TransactionRecordFilter,
TransactionsRequest, TransactionsRequest,
TransactionsResponse, TransactionsResponse,
TransactionState, TransactionState,
@ -153,6 +154,7 @@ import {
resumePeerPushDebitTransaction, resumePeerPushDebitTransaction,
abortPeerPushDebitTransaction, abortPeerPushDebitTransaction,
} from "./pay-peer-push-debit.js"; } from "./pay-peer-push-debit.js";
import { iterRecordsForDeposit, iterRecordsForPeerPullDebit, iterRecordsForPeerPullInitiation, iterRecordsForPeerPushCredit, iterRecordsForPeerPushInitiation, iterRecordsForPurchase, iterRecordsForRefresh, iterRecordsForRefund, iterRecordsForReward, iterRecordsForWithdrawal } from "./pending.js";
const logger = new Logger("taler-wallet-core:transactions.ts"); const logger = new Logger("taler-wallet-core:transactions.ts");
@ -929,6 +931,11 @@ export async function getTransactions(
): Promise<TransactionsResponse> { ): Promise<TransactionsResponse> {
const transactions: Transaction[] = []; const transactions: Transaction[] = [];
const filter: TransactionRecordFilter = {};
if (transactionsRequest?.filterByState) {
filter.onlyState = transactionsRequest.filterByState;
}
await ws.db await ws.db
.mktx((x) => [ .mktx((x) => [
x.coins, x.coins,
@ -952,7 +959,7 @@ export async function getTransactions(
x.refundGroups, x.refundGroups,
]) ])
.runReadOnly(async (tx) => { .runReadOnly(async (tx) => {
tx.peerPushPaymentInitiations.iter().forEachAsync(async (pi) => { await iterRecordsForPeerPushInitiation(tx, filter, async (pi) => {
const amount = Amounts.parseOrThrow(pi.amount); const amount = Amounts.parseOrThrow(pi.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) { if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
@ -968,7 +975,7 @@ export async function getTransactions(
); );
}); });
tx.peerPullPaymentIncoming.iter().forEachAsync(async (pi) => { await iterRecordsForPeerPullDebit(tx, filter, async (pi) => {
const amount = Amounts.parseOrThrow(pi.contractTerms.amount); const amount = Amounts.parseOrThrow(pi.contractTerms.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) { if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
return; return;
@ -986,7 +993,7 @@ export async function getTransactions(
transactions.push(buildTransactionForPullPaymentDebit(pi)); transactions.push(buildTransactionForPullPaymentDebit(pi));
}); });
tx.peerPushPaymentIncoming.iter().forEachAsync(async (pi) => { await iterRecordsForPeerPushCredit(tx, filter, async (pi) => {
if (!pi.currency) { if (!pi.currency) {
// Legacy transaction // Legacy transaction
return; return;
@ -1026,8 +1033,8 @@ export async function getTransactions(
), ),
); );
}); });
tx.peerPullPaymentInitiations.iter().forEachAsync(async (pi) => { await iterRecordsForPeerPullInitiation(tx, filter, async (pi) => {
const currency = Amounts.currencyOf(pi.amount); const currency = Amounts.currencyOf(pi.amount);
if (shouldSkipCurrency(transactionsRequest, currency)) { if (shouldSkipCurrency(transactionsRequest, currency)) {
return; return;
@ -1060,7 +1067,7 @@ export async function getTransactions(
); );
}); });
tx.refundGroups.iter().forEachAsync(async (refundGroup) => { await iterRecordsForRefund(tx, filter, async (refundGroup) => {
const currency = Amounts.currencyOf(refundGroup.amountRaw); const currency = Amounts.currencyOf(refundGroup.amountRaw);
if (shouldSkipCurrency(transactionsRequest, currency)) { if (shouldSkipCurrency(transactionsRequest, currency)) {
return; return;
@ -1071,8 +1078,8 @@ export async function getTransactions(
); );
transactions.push(buildTransactionForRefund(refundGroup, contractData)); transactions.push(buildTransactionForRefund(refundGroup, contractData));
}); });
tx.refreshGroups.iter().forEachAsync(async (rg) => { await iterRecordsForRefresh(tx, filter, async (rg) => {
if (shouldSkipCurrency(transactionsRequest, rg.currency)) { if (shouldSkipCurrency(transactionsRequest, rg.currency)) {
return; return;
} }
@ -1092,7 +1099,7 @@ export async function getTransactions(
} }
}); });
tx.withdrawalGroups.iter().forEachAsync(async (wsr) => { await iterRecordsForWithdrawal(tx, filter ,async (wsr) => {
if ( if (
shouldSkipCurrency( shouldSkipCurrency(
transactionsRequest, transactionsRequest,
@ -1146,7 +1153,7 @@ export async function getTransactions(
} }
}); });
tx.depositGroups.iter().forEachAsync(async (dg) => { await iterRecordsForDeposit(tx, filter, async (dg) => {
const amount = Amounts.parseOrThrow(dg.contractTermsRaw.amount); const amount = Amounts.parseOrThrow(dg.contractTermsRaw.amount);
if (shouldSkipCurrency(transactionsRequest, amount.currency)) { if (shouldSkipCurrency(transactionsRequest, amount.currency)) {
return; return;
@ -1157,7 +1164,7 @@ export async function getTransactions(
transactions.push(buildTransactionForDeposit(dg, retryRecord)); transactions.push(buildTransactionForDeposit(dg, retryRecord));
}); });
tx.purchases.iter().forEachAsync(async (purchase) => { await iterRecordsForPurchase(tx, filter, async (purchase) => {
const download = purchase.download; const download = purchase.download;
if (!download) { if (!download) {
return; return;
@ -1200,7 +1207,7 @@ export async function getTransactions(
); );
}); });
tx.rewards.iter().forEachAsync(async (tipRecord) => { await iterRecordsForReward(tx, filter, async (tipRecord) => {
if ( if (
shouldSkipCurrency( shouldSkipCurrency(
transactionsRequest, transactionsRequest,
@ -1909,4 +1916,5 @@ export function notifyTransition(
transactionId, transactionId,
}); });
} }
ws.workAvailable.trigger();
} }

View File

@ -714,12 +714,6 @@ async function processPlanchetGenerate(
} }
interface WithdrawalRequestBatchArgs { interface WithdrawalRequestBatchArgs {
/**
* Use the batched request on the network level.
* Not supported by older exchanges.
*/
useBatchRequest: boolean;
coinStartIndex: number; coinStartIndex: number;
batchSize: number; batchSize: number;
@ -928,9 +922,6 @@ async function processPlanchetExchangeBatchRequest(
// FIXME: handle individual error codes better! // FIXME: handle individual error codes better!
// FIXME[oec]: add age-withdraw-request here
if (args.useBatchRequest) {
const reqUrl = new URL( const reqUrl = new URL(
`reserves/${withdrawalGroup.reservePub}/batch-withdraw`, `reserves/${withdrawalGroup.reservePub}/batch-withdraw`,
withdrawalGroup.exchangeBaseUrl, withdrawalGroup.exchangeBaseUrl,
@ -956,53 +947,6 @@ async function processPlanchetExchangeBatchRequest(
coinIdxs: [], coinIdxs: [],
}; };
} }
} else {
// We emulate the batch response here by making multiple individual requests
const responses: ExchangeWithdrawBatchResponse = {
ev_sigs: [],
};
const responseCoinIdxs: number[] = [];
for (let i = 0; i < batchReq.planchets.length; i++) {
try {
const p = batchReq.planchets[i];
const reqUrl = new URL(
`reserves/${withdrawalGroup.reservePub}/withdraw`,
withdrawalGroup.exchangeBaseUrl,
).href;
const resp = await ws.http.fetch(reqUrl, { method: "POST", body: p });
if (resp.status === HttpStatusCode.UnavailableForLegalReasons) {
await handleKycRequired(
ws,
withdrawalGroup,
resp,
i,
requestCoinIdxs,
);
// We still return blinded coins that we could actually withdraw.
return {
coinIdxs: responseCoinIdxs,
batchResp: responses,
};
}
const r = await readSuccessResponseJsonOrThrow(
resp,
codecForWithdrawResponse(),
);
responses.ev_sigs.push(r);
responseCoinIdxs.push(requestCoinIdxs[i]);
} catch (e) {
if (e instanceof TalerError) {
logger.warn(`withdrawing planchet failed: ${j2s(e.errorDetail)}`);
logger.warn(`planchet denom pub hash: ${batchReq.planchets[i].denom_pub_hash}`);
}
await storeCoinError(e, requestCoinIdxs[i]);
}
}
return {
coinIdxs: responseCoinIdxs,
batchResp: responses,
};
}
} }
async function processPlanchetVerifyAndStoreCoin( async function processPlanchetVerifyAndStoreCoin(
@ -1555,7 +1499,6 @@ async function processWithdrawalGroupPendingReady(
const resp = await processPlanchetExchangeBatchRequest(ws, wgContext, { const resp = await processPlanchetExchangeBatchRequest(ws, wgContext, {
batchSize: maxBatchSize, batchSize: maxBatchSize,
coinStartIndex: i, coinStartIndex: i,
useBatchRequest: ws.config.features.batchWithdrawal,
}); });
let work: Promise<void>[] = []; let work: Promise<void>[] = [];
work = []; work = [];

View File

@ -23,6 +23,8 @@ export interface OpenedPromise<T> {
/** /**
* Get an unresolved promise together with its extracted resolve / reject * Get an unresolved promise together with its extracted resolve / reject
* function. * function.
*
* Recent ECMAScript proposals also call this a promise capability.
*/ */
export function openPromise<T>(): OpenedPromise<T> { export function openPromise<T>(): OpenedPromise<T> {
let resolve: ((x?: any) => void) | null = null; let resolve: ((x?: any) => void) | null = null;
@ -39,22 +41,20 @@ export function openPromise<T>(): OpenedPromise<T> {
} }
export class AsyncCondition { export class AsyncCondition {
private _waitPromise: Promise<void>; private promCap?: OpenedPromise<void> = undefined;
private _resolveWaitPromise: (val: void) => void; constructor() {}
constructor() {
const op = openPromise<void>();
this._waitPromise = op.promise;
this._resolveWaitPromise = op.resolve;
}
wait(): Promise<void> { wait(): Promise<void> {
return this._waitPromise; if (!this.promCap) {
this.promCap = openPromise<void>();
}
return this.promCap.promise;
} }
trigger(): void { trigger(): void {
this._resolveWaitPromise(); if (this.promCap) {
const op = openPromise<void>(); this.promCap.resolve();
this._waitPromise = op.promise; }
this._resolveWaitPromise = op.resolve; this.promCap = undefined;
} }
} }

View File

@ -338,7 +338,7 @@ interface IndexReadOnlyAccessor<RecordType> {
iter(query?: IDBKeyRange | IDBValidKey): ResultStream<RecordType>; iter(query?: IDBKeyRange | IDBValidKey): ResultStream<RecordType>;
get(query: IDBValidKey): Promise<RecordType | undefined>; get(query: IDBValidKey): Promise<RecordType | undefined>;
getAll( getAll(
query: IDBKeyRange | IDBValidKey, query?: IDBKeyRange | IDBValidKey,
count?: number, count?: number,
): Promise<RecordType[]>; ): Promise<RecordType[]>;
} }
@ -351,7 +351,7 @@ interface IndexReadWriteAccessor<RecordType> {
iter(query: IDBKeyRange | IDBValidKey): ResultStream<RecordType>; iter(query: IDBKeyRange | IDBValidKey): ResultStream<RecordType>;
get(query: IDBValidKey): Promise<RecordType | undefined>; get(query: IDBValidKey): Promise<RecordType | undefined>;
getAll( getAll(
query: IDBKeyRange | IDBValidKey, query?: IDBKeyRange | IDBValidKey,
count?: number, count?: number,
): Promise<RecordType[]>; ): Promise<RecordType[]>;
} }

View File

@ -272,7 +272,6 @@ export interface WalletConfig {
* Configurations values that may be safe to show to the user * Configurations values that may be safe to show to the user
*/ */
features: { features: {
batchWithdrawal: boolean;
allowHttp: boolean; allowHttp: boolean;
}; };
} }

View File

@ -479,6 +479,7 @@ async function runTaskLoop(
// Wait until either the timeout, or we are notified (via the latch) // Wait until either the timeout, or we are notified (via the latch)
// that more work might be available. // that more work might be available.
await Promise.race([timeout, ws.workAvailable.wait()]); await Promise.race([timeout, ws.workAvailable.wait()]);
logger.trace(`done waiting for available work`);
} else { } else {
logger.trace( logger.trace(
`running ${pending.pendingOperations.length} pending operations`, `running ${pending.pendingOperations.length} pending operations`,
@ -1695,7 +1696,6 @@ export class Wallet {
], ],
}, },
features: { features: {
batchWithdrawal: false,
allowHttp: false, allowHttp: false,
}, },
testing: { testing: {

View File

@ -51,7 +51,7 @@ export const buildConfig = {
target: [ target: [
'es2020' 'es2020'
], ],
external: ["os", "std"], external: ["os", "std", "better-sqlite3"],
format: 'esm', format: 'esm',
platform: 'neutral', platform: 'neutral',
mainFields: ["module", "main"], mainFields: ["module", "main"],

View File

@ -27,9 +27,9 @@ import {
CoreApiResponseSuccess, CoreApiResponseSuccess,
getErrorDetailFromException, getErrorDetailFromException,
InitRequest, InitRequest,
j2s,
Logger, Logger,
setGlobalLogLevelFromString, setGlobalLogLevelFromString,
setPRNG,
WalletNotification, WalletNotification,
} from "@gnu-taler/taler-util"; } from "@gnu-taler/taler-util";
import { createPlatformHttpLib } from "@gnu-taler/taler-util/http"; import { createPlatformHttpLib } from "@gnu-taler/taler-util/http";
@ -47,20 +47,11 @@ import {
getRecoveryStartState, getRecoveryStartState,
discoverPolicies, discoverPolicies,
mergeDiscoveryAggregate, mergeDiscoveryAggregate,
ReducerState,
} from "@gnu-taler/anastasis-core"; } from "@gnu-taler/anastasis-core";
import { userIdentifierDerive } from "@gnu-taler/anastasis-core/lib/crypto.js"; import { userIdentifierDerive } from "@gnu-taler/anastasis-core/lib/crypto.js";
setGlobalLogLevelFromString("trace"); setGlobalLogLevelFromString("trace");
setPRNG(function (x: Uint8Array, n: number) {
// @ts-ignore
const va = globalThis._tart.randomBytes(n);
const v = new Uint8Array(va);
for (let i = 0; i < n; i++) x[i] = v[i];
for (let i = 0; i < v.length; i++) v[i] = 0;
});
const logger = new Logger("taler-wallet-embedded/index.ts"); const logger = new Logger("taler-wallet-embedded/index.ts");
/** /**
@ -222,6 +213,8 @@ async function handleAnastasisRequest(
cursor: discoverRes.cursor, cursor: discoverRes.cursor,
}, },
}); });
default:
throw Error("unsupported anastasis operation");
} }
} }
@ -295,10 +288,10 @@ export async function testWithGv() {
}); });
} }
export async function testWithLocal() { export async function testWithLocal(path: string) {
console.log("running local test"); console.log("running local test");
const w = await createNativeWalletHost2({ const w = await createNativeWalletHost2({
persistentStoragePath: "walletdb.json", persistentStoragePath: path ?? "walletdb.json",
config: { config: {
features: { features: {
allowHttp: true, allowHttp: true,
@ -310,7 +303,7 @@ export async function testWithLocal() {
skipDefaults: true, skipDefaults: true,
}); });
console.log("initialized wallet"); console.log("initialized wallet");
await w.wallet.client.call(WalletApiOperation.RunIntegrationTestV2, { await w.wallet.client.call(WalletApiOperation.RunIntegrationTest, {
amountToSpend: "TESTKUDOS:1", amountToSpend: "TESTKUDOS:1",
amountToWithdraw: "TESTKUDOS:3", amountToWithdraw: "TESTKUDOS:3",
bankAccessApiBaseUrl: "http://localhost:8082/taler-bank-access/", bankAccessApiBaseUrl: "http://localhost:8082/taler-bank-access/",
@ -323,6 +316,7 @@ export async function testWithLocal() {
}); });
console.log("done with task loop"); console.log("done with task loop");
w.wallet.stop(); w.wallet.stop();
console.log("DB stats:", j2s(w.getDbStats()));
} }
export async function testArgon2id() { export async function testArgon2id() {
@ -357,4 +351,4 @@ globalThis.testArgon2id = testArgon2id;
// @ts-ignore // @ts-ignore
globalThis.testReduceAction = reduceAction; globalThis.testReduceAction = reduceAction;
// @ts-ignore // @ts-ignore
globalThis.testDiscoverPolicies = discoverPolicies; globalThis.testDiscoverPolicies = discoverPolicies;

View File

@ -34,7 +34,6 @@ function parse_json_or_undefined<T>(str: string | undefined): T | undefined {
export const codecForSettings = (): Codec<Settings> => export const codecForSettings = (): Codec<Settings> =>
buildCodecForObject<Settings>() buildCodecForObject<Settings>()
.property("walletAllowHttp", codecForBoolean()) .property("walletAllowHttp", codecForBoolean())
.property("walletBatchWithdrawal", codecForBoolean())
.property("injectTalerSupport", codecForBoolean()) .property("injectTalerSupport", codecForBoolean())
.property("advanceMode", codecForBoolean()) .property("advanceMode", codecForBoolean())
.property("backup", codecForBoolean()) .property("backup", codecForBoolean())

View File

@ -17,7 +17,7 @@ msgstr ""
"Project-Id-Version: Taler Wallet\n" "Project-Id-Version: Taler Wallet\n"
"Report-Msgid-Bugs-To: languages@taler.net\n" "Report-Msgid-Bugs-To: languages@taler.net\n"
"POT-Creation-Date: 2016-11-23 00:00+0100\n" "POT-Creation-Date: 2016-11-23 00:00+0100\n"
"PO-Revision-Date: 2023-08-15 07:28+0000\n" "PO-Revision-Date: 2023-08-16 12:43+0000\n"
"Last-Translator: Krystian Baran <kiszkot@murena.io>\n" "Last-Translator: Krystian Baran <kiszkot@murena.io>\n"
"Language-Team: Italian <https://weblate.taler.net/projects/gnu-taler/" "Language-Team: Italian <https://weblate.taler.net/projects/gnu-taler/"
"webextensions/it/>\n" "webextensions/it/>\n"
@ -31,7 +31,7 @@ msgstr ""
#: src/NavigationBar.tsx:139 #: src/NavigationBar.tsx:139
#, c-format #, c-format
msgid "Balance" msgid "Balance"
msgstr "" msgstr "Saldo"
#: src/NavigationBar.tsx:142 #: src/NavigationBar.tsx:142
#, c-format #, c-format
@ -41,12 +41,12 @@ msgstr ""
#: src/NavigationBar.tsx:147 #: src/NavigationBar.tsx:147
#, c-format #, c-format
msgid "QR Reader and Taler URI" msgid "QR Reader and Taler URI"
msgstr "" msgstr "Lettore QR e Taler URI"
#: src/NavigationBar.tsx:154 #: src/NavigationBar.tsx:154
#, c-format #, c-format
msgid "Settings" msgid "Settings"
msgstr "" msgstr "Impostazioni"
#: src/NavigationBar.tsx:184 #: src/NavigationBar.tsx:184
#, c-format #, c-format

View File

@ -119,7 +119,6 @@ export const defaultSettings: Settings = {
showJsonOnError: false, showJsonOnError: false,
extendedAccountTypes: false, extendedAccountTypes: false,
walletAllowHttp: false, walletAllowHttp: false,
walletBatchWithdrawal: false,
}; };
/** /**

View File

@ -316,10 +316,6 @@ function AdvanceSettings(): VNode {
label: i18n.str`Allow HTTP connections`, label: i18n.str`Allow HTTP connections`,
description: i18n.str`Using HTTP connection may be faster but unsafe (wallet restart required)`, description: i18n.str`Using HTTP connection may be faster but unsafe (wallet restart required)`,
}, },
walletBatchWithdrawal: {
label: i18n.str`Allow batch withdrawals`,
description: i18n.str`Using the batch withdrawal API allows faster withdrawals (wallet restart required)`,
},
langSelector: { langSelector: {
label: i18n.str`Lang selector`, label: i18n.str`Lang selector`,
description: i18n.str`Allows to manually change the language of the UI. Otherwise it will be automatically selected by your browser configuration.`, description: i18n.str`Allows to manually change the language of the UI. Otherwise it will be automatically selected by your browser configuration.`,

View File

@ -332,7 +332,6 @@ async function reinitWallet(): Promise<void> {
{ {
features: { features: {
allowHttp: settings.walletAllowHttp, allowHttp: settings.walletAllowHttp,
batchWithdrawal: settings.walletBatchWithdrawal,
}, },
}, },
); );

File diff suppressed because it is too large Load Diff