fix DB indexing issues

Florian Dold 2022-01-11 21:00:12 +01:00
parent a05e891d6e
commit a74cdf0529
17 changed files with 455 additions and 178 deletions

View File

@ -229,6 +229,16 @@ function furthestKey(
}
}
export interface AccessStats {
writeTransactions: number;
readTransactions: number;
writesPerStore: Record<string, number>;
readsPerStore: Record<string, number>;
readsPerIndex: Record<string, number>;
readItemsPerIndex: Record<string, number>;
readItemsPerStore: Record<string, number>;
}
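A rough sketch of how these counters might be consumed after a benchmark run (illustrative only, not part of the commit; it assumes an AccessStats value obtained from a MemoryBackend with trackStats enabled, and per-index keys of the objectStore.indexName shape used further down in this file):

function summarizeAccessStats(stats: AccessStats): string[] {
  const lines: string[] = [];
  lines.push(
    `transactions: ${stats.readTransactions} read, ${stats.writeTransactions} write`,
  );
  // Sort indexes by read count so the hottest access paths show up first.
  const byReads = Object.entries(stats.readsPerIndex).sort((a, b) => b[1] - a[1]);
  for (const [indexKey, reads] of byReads) {
    const items = stats.readItemsPerIndex[indexKey] ?? 0;
    lines.push(`index ${indexKey}: ${reads} reads, ${items} items returned`);
  }
  return lines;
}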
/**
* Primitive in-memory backend.
*
@ -266,6 +276,18 @@ export class MemoryBackend implements Backend {
enableTracing: boolean = false;
trackStats: boolean = true;
accessStats: AccessStats = {
readTransactions: 0,
writeTransactions: 0,
readsPerStore: {},
readsPerIndex: {},
readItemsPerIndex: {},
readItemsPerStore: {},
writesPerStore: {},
};
/**
* Load the data in this IndexedDB backend from a dump in JSON format.
*
@ -512,6 +534,14 @@ export class MemoryBackend implements Backend {
throw Error("unsupported transaction mode"); throw Error("unsupported transaction mode");
} }
if (this.trackStats) {
if (mode === "readonly") {
this.accessStats.readTransactions++;
} else if (mode === "readwrite") {
this.accessStats.writeTransactions++;
}
}
myDb.txRestrictObjectStores = [...objectStores];
this.connectionsByTransaction[transactionCookie] = myConn;
@ -1153,6 +1183,13 @@ export class MemoryBackend implements Backend {
lastIndexPosition: req.lastIndexPosition,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}.${req.indexName}`;
this.accessStats.readsPerIndex[k] =
(this.accessStats.readsPerIndex[k] ?? 0) + 1;
this.accessStats.readItemsPerIndex[k] =
(this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count;
}
} else {
if (req.advanceIndexKey !== undefined) {
throw Error("unsupported request");
@ -1167,6 +1204,13 @@ export class MemoryBackend implements Backend {
lastIndexPosition: req.lastIndexPosition,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}`;
this.accessStats.readsPerStore[k] =
(this.accessStats.readsPerStore[k] ?? 0) + 1;
this.accessStats.readItemsPerStore[k] =
(this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
}
}
if (this.enableTracing) {
console.log(`TRACING: getRecords got ${resp.count} results`);
@ -1180,6 +1224,11 @@ export class MemoryBackend implements Backend {
): Promise<RecordStoreResponse> {
if (this.enableTracing) {
console.log(`TRACING: storeRecord`);
console.log(
`key ${storeReq.key}, record ${JSON.stringify(
structuredEncapsulate(storeReq.value),
)}`,
);
}
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
@ -1199,6 +1248,12 @@ export class MemoryBackend implements Backend {
}', transaction is over ${JSON.stringify(db.txRestrictObjectStores)}`,
);
}
if (this.trackStats) {
this.accessStats.writesPerStore[storeReq.objectStoreName] =
(this.accessStats.writesPerStore[storeReq.objectStoreName] ?? 0) + 1;
}
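Each counter update repeats the same (record[key] ?? 0) + 1 upsert. A tiny helper along these lines could factor that out; bump is hypothetical and not part of the commit:

function bump(counters: Record<string, number>, key: string, delta = 1): void {
  // Initialize missing counters to zero, then add the delta.
  counters[key] = (counters[key] ?? 0) + delta;
}
// e.g. bump(this.accessStats.writesPerStore, storeReq.objectStoreName);
// or   bump(this.accessStats.readItemsPerIndex, k, resp.count);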
const schema = myConn.modifiedSchema;
const objectStoreMapEntry = myConn.objectStoreMap[storeReq.objectStoreName];
@ -1275,7 +1330,9 @@ export class MemoryBackend implements Backend {
}
}
const objectStoreRecord: ObjectStoreRecord = {
const oldStoreRecord = modifiedData.get(key);
const newObjectStoreRecord: ObjectStoreRecord = {
// FIXME: We should serialize the key here, not just clone it.
primaryKey: structuredClone(key),
value: structuredClone(value),
@ -1283,7 +1340,7 @@ export class MemoryBackend implements Backend {
objectStoreMapEntry.store.modifiedData = modifiedData.with(
key,
objectStoreRecord,
newObjectStoreRecord,
true,
);
@ -1297,6 +1354,11 @@ export class MemoryBackend implements Backend {
}
const indexProperties =
schema.objectStores[storeReq.objectStoreName].indexes[indexName];
// Remove old index entry first!
if (oldStoreRecord) {
this.deleteFromIndex(index, key, oldStoreRecord.value, indexProperties);
}
try {
this.insertIntoIndex(index, key, value, indexProperties);
} catch (e) {
@ -1482,31 +1544,28 @@ function getIndexRecords(req: {
const primaryKeys: Key[] = [];
const values: Value[] = [];
const { unique, range, forward, indexData } = req;
let indexPos = req.lastIndexPosition;
let objectStorePos: IDBValidKey | undefined = undefined;
let indexEntry: IndexRecord | undefined = undefined;
const rangeStart = forward ? range.lower : range.upper;
const dataStart = forward ? indexData.minKey() : indexData.maxKey();
indexPos = furthestKey(forward, indexPos, rangeStart);
indexPos = furthestKey(forward, indexPos, dataStart);
function nextIndexEntry(): IndexRecord | undefined {
assertInvariant(indexPos != null);
const res: [IDBValidKey, IndexRecord] | undefined = forward
? indexData.nextHigherPair(indexPos)
: indexData.nextLowerPair(indexPos);
if (res) {
indexEntry = res[1];
indexPos = indexEntry.indexKey;
return indexEntry;
} else {
indexEntry = undefined;
indexPos = undefined;
return undefined;
}
}
function nextIndexEntry(prevPos: IDBValidKey): IndexRecord | undefined {
const res: [IDBValidKey, IndexRecord] | undefined = forward
? indexData.nextHigherPair(prevPos)
: indexData.nextLowerPair(prevPos);
return res ? res[1] : undefined;
}
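The helper is now a pure function of the previous position instead of mutating indexPos and indexEntry in the enclosing scope; callers advance explicitly, following the pattern used further down in this function:

// Sketch of the caller-side pattern (taken from later in this hunk):
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
  return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();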
function packResult(): RecordGetResponse {
// Collect the values based on the primary keys,
// if requested.
if (req.resultLevel === ResultLevel.Full) {
for (let i = 0; i < numResults; i++) {
const result = req.storeData.get(primaryKeys[i]);
if (!result) {
console.error("invariant violated during read");
console.error("request was", req);
throw Error("invariant violated during read");
}
values.push(structuredClone(result.value));
}
}
return {
count: numResults,
indexKeys:
@ -1517,18 +1576,39 @@ function getIndexRecords(req: {
};
}
if (indexPos == null) {
let firstIndexPos = req.lastIndexPosition;
{
const rangeStart = forward ? range.lower : range.upper;
const dataStart = forward ? indexData.minKey() : indexData.maxKey();
firstIndexPos = furthestKey(forward, firstIndexPos, rangeStart);
firstIndexPos = furthestKey(forward, firstIndexPos, dataStart);
}
if (firstIndexPos == null) {
return packResult();
}
let objectStorePos: IDBValidKey | undefined = undefined;
let indexEntry: IndexRecord | undefined = undefined;
// Now we align at indexPos and after objectStorePos
indexEntry = indexData.get(indexPos);
indexEntry = indexData.get(firstIndexPos);
if (!indexEntry) {
// We're not aligned to an index key, go to next index entry
nextIndexEntry();
}
if (indexEntry) {
indexEntry = nextIndexEntry(firstIndexPos);
if (!indexEntry) {
return packResult();
}
objectStorePos = nextKey(true, indexEntry.primaryKeys, undefined);
} else if (
req.lastIndexPosition != null &&
compareKeys(req.lastIndexPosition, indexEntry.indexKey) !== 0
) {
// We're already past the desired lastIndexPosition, don't use
// lastObjectStorePosition.
objectStorePos = nextKey(true, indexEntry.primaryKeys, undefined);
} else {
objectStorePos = nextKey(
true,
indexEntry.primaryKeys,
@ -1536,43 +1616,56 @@ function getIndexRecords(req: {
);
}
// Now skip lower/upper bound of open ranges
if (
forward &&
range.lowerOpen &&
range.lower != null &&
compareKeys(range.lower, indexPos) === 0
compareKeys(range.lower, indexEntry.indexKey) === 0
) {
const e = nextIndexEntry();
objectStorePos = e?.primaryKeys.minKey();
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
}
if (
!forward &&
range.upperOpen &&
range.upper != null &&
compareKeys(range.upper, indexPos) === 0
compareKeys(range.upper, indexEntry.indexKey) === 0
) {
const e = nextIndexEntry();
objectStorePos = e?.primaryKeys.minKey();
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
}
// If requested, return only unique results
if (
unique &&
indexPos != null &&
req.lastIndexPosition != null &&
compareKeys(indexPos, req.lastIndexPosition) === 0
compareKeys(indexEntry.indexKey, req.lastIndexPosition) === 0
) {
const e = nextIndexEntry();
objectStorePos = e?.primaryKeys.minKey();
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
}
if (req.advancePrimaryKey) {
indexPos = furthestKey(forward, indexPos, req.advanceIndexKey);
if (indexPos) {
indexEntry = indexData.get(indexPos);
if (!indexEntry) {
nextIndexEntry();
}
}
}
if (req.advanceIndexKey != null) {
const ik = furthestKey(forward, indexEntry.indexKey, req.advanceIndexKey)!;
indexEntry = indexData.get(ik);
if (!indexEntry) {
indexEntry = nextIndexEntry(ik);
}
if (!indexEntry) {
return packResult();
}
}
@ -1580,9 +1673,7 @@ function getIndexRecords(req: {
if (
req.advanceIndexKey != null &&
req.advancePrimaryKey &&
indexPos != null &&
indexEntry &&
compareKeys(indexPos, req.advanceIndexKey) == 0
compareKeys(indexEntry.indexKey, req.advanceIndexKey) == 0
) {
if (
objectStorePos == null ||
@ -1597,13 +1688,10 @@ function getIndexRecords(req: {
}
while (1) {
if (indexPos === undefined) {
break;
}
if (req.limit != 0 && numResults == req.limit) {
break;
}
if (!range.includes(indexPos)) {
if (!range.includes(indexEntry.indexKey)) {
break;
}
if (indexEntry === undefined) {
@ -1611,14 +1699,16 @@ function getIndexRecords(req: {
}
if (objectStorePos == null) {
// We don't have any more records with the current index key.
nextIndexEntry();
if (indexEntry) {
objectStorePos = indexEntry.primaryKeys.minKey();
}
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
continue;
}
indexKeys.push(indexEntry.indexKey);
primaryKeys.push(objectStorePos);
indexKeys.push(structuredClone(indexEntry.indexKey));
primaryKeys.push(structuredClone(objectStorePos));
numResults++;
if (unique) {
objectStorePos = undefined;
@ -1627,20 +1717,6 @@ function getIndexRecords(req: {
}
}
// Now we can collect the values based on the primary keys,
// if requested.
if (req.resultLevel === ResultLevel.Full) {
for (let i = 0; i < numResults; i++) {
const result = req.storeData.get(primaryKeys[i]);
if (!result) {
console.error("invariant violated during read");
console.error("request was", req);
throw Error("invariant violated during read");
}
values.push(result.value);
}
}
return packResult();
}
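Condensed outline of the rewritten getIndexRecords control flow (an informal summary of the code above, not additional behavior):

// 1. Compute firstIndexPos as the furthest of lastIndexPosition, the range start,
//    and the first key present in indexData; bail out early if it is null.
// 2. Align indexEntry at (or after) firstIndexPos and pick objectStorePos from
//    indexEntry.primaryKeys, only honoring lastObjectStorePosition while still on
//    the lastIndexPosition key.
// 3. Skip an open lower/upper range bound and, for unique cursors, a repeated key.
// 4. Apply advanceIndexKey / advancePrimaryKey if requested.
// 5. Loop, pushing structured clones of (indexKey, primaryKey) pairs, moving within
//    an index entry first and across entries via nextIndexEntry(), until the limit
//    or the end of the range is reached.
// 6. packResult() resolves primary keys to record values when ResultLevel.Full
//    was requested.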

View File

@ -64,7 +64,10 @@ import { makeStoreKeyValue } from "./util/makeStoreKeyValue";
import { normalizeKeyPath } from "./util/normalizeKeyPath";
import { openPromise } from "./util/openPromise";
import queueTask from "./util/queueTask";
import { structuredClone } from "./util/structuredClone";
import {
checkStructuredCloneOrThrow,
structuredClone,
} from "./util/structuredClone";
import { validateKeyPath } from "./util/validateKeyPath";
import { valueToKey } from "./util/valueToKey";
@ -303,7 +306,7 @@ export class BridgeIDBCursor implements IDBCursor {
try {
// Only called for the side effect of throwing an exception
structuredClone(value);
checkStructuredCloneOrThrow(value);
} catch (e) {
throw new DataCloneError();
}
@ -327,6 +330,7 @@ export class BridgeIDBCursor implements IDBCursor {
}
const { btx } = this.source._confirmStartedBackendTransaction();
await this._backend.storeRecord(btx, storeReq);
// FIXME: update the index position here!
};
return transaction._execRequestAsync({
operation,

View File

@ -10,7 +10,6 @@ import {
// IDBCursor.update() - index - modify a record in the object store
test.cb("WPT test idbcursor_update_index.htm", (t) => {
var db: any,
count = 0,
records = [
{ pKey: "primaryKey_0", iKey: "indexKey_0" },
{ pKey: "primaryKey_1", iKey: "indexKey_1" },

View File

@ -72,6 +72,7 @@ export type {
};
export { MemoryBackend } from "./MemoryBackend";
export type { AccessStats } from "./MemoryBackend";
// globalThis polyfill, see https://mathiasbynens.be/notes/globalthis
(function () {

View File

@ -171,6 +171,75 @@ export function mkDeepClone() {
}
}
/**
* Check if an object is deeply cloneable.
* Only called for the side-effect of throwing an exception.
*/
export function mkDeepCloneCheckOnly() {
const refs = [] as any;
return clone;
function cloneArray(a: any) {
var keys = Object.keys(a);
refs.push(a);
for (var i = 0; i < keys.length; i++) {
var k = keys[i] as any;
var cur = a[k];
checkCloneableOrThrow(cur);
if (typeof cur !== "object" || cur === null) {
// do nothing
} else if (cur instanceof Date) {
// do nothing
} else if (ArrayBuffer.isView(cur)) {
// do nothing
} else {
var index = refs.indexOf(cur);
if (index !== -1) {
// do nothing
} else {
clone(cur);
}
}
}
refs.pop();
}
function clone(o: any) {
checkCloneableOrThrow(o);
if (typeof o !== "object" || o === null) return o;
if (o instanceof Date) return;
if (Array.isArray(o)) return cloneArray(o);
if (o instanceof Map) return cloneArray(Array.from(o));
if (o instanceof Set) return cloneArray(Array.from(o));
refs.push(o);
for (var k in o) {
if (Object.hasOwnProperty.call(o, k) === false) continue;
var cur = o[k] as any;
checkCloneableOrThrow(cur);
if (typeof cur !== "object" || cur === null) {
// do nothing
} else if (cur instanceof Date) {
// do nothing
} else if (cur instanceof Map) {
cloneArray(Array.from(cur));
} else if (cur instanceof Set) {
cloneArray(Array.from(cur));
} else if (ArrayBuffer.isView(cur)) {
// do nothing
} else {
var i = refs.indexOf(cur);
if (i !== -1) {
// do nothing
} else {
clone(cur);
}
}
}
refs.pop();
}
}
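Illustrative usage of the check-only walker (my example, not from the commit): it traverses nested objects, arrays, Dates, Maps, Sets and typed arrays without allocating clones, delegating the per-value decision to checkCloneableOrThrow defined elsewhere in this file, and returns nothing on success:

const check = mkDeepCloneCheckOnly();
// Passes: plain data, Dates, Maps, Sets and typed arrays are walked without cloning.
check({
  a: [1, 2, { when: new Date() }],
  m: new Map([["k", "v"]]),
  buf: new Uint8Array(4),
});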
function internalEncapsulate(
val: any,
outRoot: any,
@ -358,3 +427,10 @@ export function structuredRevive(val: any): any {
export function structuredClone(val: any): any {
return mkDeepClone()(val);
}
/**
* Check whether a value is structured-cloneable for IndexedDB.
* Only called for the side effect of throwing an exception on
* non-cloneable values.
*/
export function checkStructuredCloneOrThrow(val: any): void {
return mkDeepCloneCheckOnly()(val);
}
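This mirrors how bridge-idb.ts uses it in the cursor update path earlier in this commit: the value is validated up front and any failure is mapped to the IndexedDB DataCloneError:

try {
  // Only called for the side effect of throwing on non-cloneable values.
  checkStructuredCloneOrThrow(value);
} catch (e) {
  throw new DataCloneError();
}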

View File

@ -22,13 +22,15 @@ import {
codecForNumber,
codecForString,
codecOptional,
j2s,
Logger,
} from "@gnu-taler/taler-util";
import {
getDefaultNodeWallet,
getDefaultNodeWallet2,
NodeHttpLib,
WalletApiOperation,
Wallet,
AccessStats,
} from "@gnu-taler/taler-wallet-core";
/**
@ -64,6 +66,7 @@ export async function runBench1(configJson: any): Promise<void> {
}
let wallet = {} as Wallet;
let getDbStats: () => AccessStats;
for (let i = 0; i < numIter; i++) {
// Create a new wallet in each iteration
@ -72,12 +75,16 @@ export async function runBench1(configJson: any): Promise<void> {
if (i % restartWallet == 0) {
if (Object.keys(wallet).length !== 0) {
wallet.stop();
console.log("wallet DB stats", j2s(getDbStats!()));
}
wallet = await getDefaultNodeWallet({
const res = await getDefaultNodeWallet2({
// No persistent DB storage.
persistentStoragePath: undefined,
httpLib: myHttpLib,
});
wallet = res.wallet;
getDbStats = res.getDbStats;
if (trustExchange) {
wallet.setInsecureTrustExchange();
}
@ -119,6 +126,7 @@ export async function runBench1(configJson: any): Promise<void> {
}
wallet.stop();
console.log("wallet DB stats", j2s(getDbStats!()));
}
/**

View File

@ -42,6 +42,7 @@ import {
import { RetryInfo } from "./util/retries.js";
import { PayCoinSelection } from "./util/coinSelection.js";
import { Event, IDBDatabase } from "@gnu-taler/idb-bridge";
import { PendingTaskInfo } from "./pending-types.js";
/**
* Name of the Taler database. This is effectively the major
@ -153,6 +154,8 @@ export interface ReserveRecord {
*/
timestampCreated: Timestamp;
operationStatus: OperationStatus;
/**
* Time when the information about this reserve was posted to the bank.
*
@ -914,10 +917,19 @@ export enum RefreshCoinStatus {
Frozen = "frozen", Frozen = "frozen",
} }
export enum OperationStatus {
Finished = "finished",
Pending = "pending",
}
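Presumably the reason for a dedicated string-valued status (rather than indexing on timestampFinished or a boolean flag) is that IndexedDB index keys must be strings, numbers, dates, binary data, or arrays of those; records whose key path resolves to anything else simply never appear in the index. A sketch of how a record carries it:

// Illustrative: a freshly created group starts out pending and is therefore
// visible through the byStatus indexes declared further down in this file.
const groupStatus: Pick<RefreshGroupRecord, "operationStatus" | "timestampFinished"> = {
  operationStatus: OperationStatus.Pending,
  timestampFinished: undefined,
};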
export interface RefreshGroupRecord {
operationStatus: OperationStatus;
/**
* Retry info, even present when the operation isn't active to allow indexing
* on the next retry timestamp.
*
* FIXME: No, this can be optional, indexing is still possible
*/
retryInfo: RetryInfo;
@ -1350,6 +1362,8 @@ export interface WithdrawalGroupRecord {
*/
timestampFinish?: Timestamp;
operationStatus: OperationStatus;
/**
* Amount including fees (i.e. the amount subtracted from the
* reserve to withdraw all coins in this withdrawal session).
@ -1561,6 +1575,8 @@ export interface DepositGroupRecord {
timestampFinished: Timestamp | undefined;
operationStatus: OperationStatus;
lastError: TalerErrorDetails | undefined;
/**
@ -1601,6 +1617,18 @@ export interface TombstoneRecord {
id: string;
}
export interface BalancePerCurrencyRecord {
currency: string;
availableNow: AmountString;
availableExpected: AmountString;
pendingIncoming: AmountString;
pendingOutgoing: AmountString;
}
export const WalletStoresV1 = {
coins: describeStore(
describeContents<CoinRecord>("coins", {
@ -1671,7 +1699,9 @@ export const WalletStoresV1 = {
describeContents<RefreshGroupRecord>("refreshGroups", {
keyPath: "refreshGroupId",
}),
{},
{
byStatus: describeIndex("byStatus", "operationStatus"),
},
),
recoupGroups: describeStore(
describeContents<RecoupGroupRecord>("recoupGroups", {
@ -1686,6 +1716,7 @@ export const WalletStoresV1 = {
"byInitialWithdrawalGroupId", "byInitialWithdrawalGroupId",
"initialWithdrawalGroupId", "initialWithdrawalGroupId",
), ),
byStatus: describeIndex("byStatus", "operationStatus"),
}, },
), ),
purchases: describeStore( purchases: describeStore(
@ -1716,6 +1747,7 @@ export const WalletStoresV1 = {
}),
{
byReservePub: describeIndex("byReservePub", "reservePub"),
byStatus: describeIndex("byStatus", "operationStatus"),
},
),
planchets: describeStore(
@ -1753,7 +1785,9 @@ export const WalletStoresV1 = {
describeContents<DepositGroupRecord>("depositGroups", {
keyPath: "depositGroupId",
}),
{},
{
byStatus: describeIndex("byStatus", "operationStatus"),
},
),
tombstones: describeStore(
describeContents<TombstoneRecord>("tombstones", { keyPath: "id" }),
@ -1765,6 +1799,12 @@ export const WalletStoresV1 = {
}),
{},
),
balancesPerCurrency: describeStore(
describeContents<BalancePerCurrencyRecord>("balancesPerCurrency", {
keyPath: "currency",
}),
{},
),
};
export interface MetaConfigRecord {

View File

@ -37,6 +37,7 @@ import type { IDBFactory } from "@gnu-taler/idb-bridge";
import { WalletNotification } from "@gnu-taler/taler-util";
import { Wallet } from "../wallet.js";
import * as fs from "fs";
import { AccessStats } from "@gnu-taler/idb-bridge/src/MemoryBackend";
const logger = new Logger("headless/helpers.ts");
@ -80,6 +81,21 @@ function makeId(length: number): string {
export async function getDefaultNodeWallet(
args: DefaultNodeWalletArgs = {},
): Promise<Wallet> {
const res = await getDefaultNodeWallet2(args);
return res.wallet;
}
/**
* Get a wallet instance with default settings for node.
*
* Extended version that allows getting DB stats.
*/
export async function getDefaultNodeWallet2(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend();
myBackend.enableTracing = false;
@ -121,7 +137,7 @@ export async function getDefaultNodeWallet(
BridgeIDBFactory.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
const myIdbFactory: IDBFactory = (myBridgeIdbFactory as any) as IDBFactory;
const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory;
let myHttpLib;
if (args.httpLib) {
@ -164,5 +180,8 @@ export async function getDefaultNodeWallet(
if (args.notifyHandler) {
w.addNotificationListener(args.notifyHandler);
}
return w;
return {
wallet: w,
getDbStats: () => myBackend.accessStats,
};
}
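Caller side, as the benchmark in this commit uses it: keep the wallet and the stats accessor, then dump the DB access statistics when the wallet is stopped (myHttpLib stands in for whatever HTTP implementation the caller already has):

const res = await getDefaultNodeWallet2({
  // No persistent DB storage.
  persistentStoragePath: undefined,
  httpLib: myHttpLib,
});
const wallet = res.wallet;
const getDbStats = res.getDbStats;
// ... run wallet operations ...
wallet.stop();
console.log("wallet DB stats", j2s(getDbStats()));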

View File

@ -20,6 +20,9 @@ export * from "./index.js";
export { NodeHttpLib } from "./headless/NodeHttpLib.js";
export {
getDefaultNodeWallet,
getDefaultNodeWallet2,
DefaultNodeWalletArgs,
} from "./headless/helpers.js";
export * from "./crypto/workers/nodeThreadWorker.js";
export type { AccessStats } from "@gnu-taler/idb-bridge";

View File

@ -4,4 +4,4 @@ This folder contains the implementations for all wallet operations that operate
To avoid cyclic dependencies, these files must **not** reference each other. Instead, other operations should only be accessed via injected dependencies.
Avoiding cyclic dependencies is important for module bundlers.

View File

@ -47,6 +47,7 @@ import {
WireInfo,
WalletStoresV1,
RefreshCoinStatus,
OperationStatus,
} from "../../db.js";
import { PayCoinSelection } from "../../util/coinSelection.js";
import { j2s } from "@gnu-taler/taler-util";
@ -180,8 +181,11 @@ async function getDenomSelStateFromBackup(
const d = await tx.denominations.get([exchangeBaseUrl, s.denom_pub_hash]);
checkBackupInvariant(!!d);
totalCoinValue = Amounts.add(totalCoinValue, d.value).amount;
totalWithdrawCost = Amounts.add(totalWithdrawCost, d.value, d.feeWithdraw)
.amount;
totalWithdrawCost = Amounts.add(
totalWithdrawCost,
d.value,
d.feeWithdraw,
).amount;
}
return {
selectedDenoms,
@ -475,6 +479,8 @@ export async function importBackup(
backupExchangeDetails.base_url,
backupReserve.initial_selected_denoms,
),
// FIXME!
operationStatus: OperationStatus.Pending,
});
}
for (const backupWg of backupReserve.withdrawal_groups) {
@ -507,6 +513,9 @@ export async function importBackup(
timestampFinish: backupWg.timestamp_finish,
withdrawalGroupId: backupWg.withdrawal_group_id,
denomSelUid: backupWg.selected_denoms_id,
operationStatus: backupWg.timestamp_finish
? OperationStatus.Finished
: OperationStatus.Pending,
});
}
}
@ -758,7 +767,8 @@ export async function importBackup(
// FIXME!
payRetryInfo: initRetryInfo(),
download,
paymentSubmitPending: !backupPurchase.timestamp_first_successful_pay,
paymentSubmitPending:
!backupPurchase.timestamp_first_successful_pay,
refundQueryRequested: false,
payCoinSelection: await recoverPayCoinSelection(
tx,
@ -809,10 +819,8 @@ export async function importBackup(
reason = RefreshReason.Scheduled;
break;
}
const refreshSessionPerCoin: (
| RefreshSessionRecord
| undefined
)[] = [];
const refreshSessionPerCoin: (RefreshSessionRecord | undefined)[] =
[];
for (const oldCoin of backupRefreshGroup.old_coins) {
const c = await tx.coins.get(oldCoin.coin_pub);
checkBackupInvariant(!!c);
@ -848,6 +856,9 @@ export async function importBackup(
? RefreshCoinStatus.Finished
: RefreshCoinStatus.Pending,
),
operationStatus: backupRefreshGroup.timestamp_finish
? OperationStatus.Finished
: OperationStatus.Pending,
inputPerCoin: backupRefreshGroup.old_coins.map((x) =>
Amounts.parseOrThrow(x.input_amount),
),

View File

@ -47,6 +47,10 @@ export async function getBalancesInsideTransaction(
withdrawalGroups: typeof WalletStoresV1.withdrawalGroups;
}>,
): Promise<BalancesResponse> {
return {
balances: [],
};
const balanceStore: Record<string, WalletBalance> = {};
/**
@ -148,6 +152,9 @@ export async function getBalancesInsideTransaction(
export async function getBalances(
ws: InternalWalletState,
): Promise<BalancesResponse> {
return {
balances: [],
};
logger.trace("starting to compute balance");
const wbal = await ws.db

View File

@ -50,7 +50,7 @@ import {
getRandomBytes,
stringToBytes,
} from "@gnu-taler/taler-util";
import { DepositGroupRecord } from "../db.js";
import { DepositGroupRecord, OperationStatus } from "../db.js";
import { guardOperationException } from "../errors.js";
import { PayCoinSelection, selectPayCoins } from "../util/coinSelection.js";
import { readSuccessResponseJsonOrThrow } from "../util/http.js";
@ -281,6 +281,7 @@ async function processDepositGroupImpl(
}
if (allDeposited) {
dg.timestampFinished = getTimestampNow();
dg.operationStatus = OperationStatus.Finished;
delete dg.lastError;
delete dg.retryInfo;
await tx.depositGroups.put(dg);
@ -409,11 +410,7 @@ export async function getFeeForDeposit(
refund_deadline: { t_ms: 0 },
};
const contractData = extractContractData(
contractTerms,
"",
"",
);
const contractData = extractContractData(contractTerms, "", "");
const candidates = await getCandidatePayCoins(ws, contractData);
@ -436,7 +433,6 @@ export async function getFeeForDeposit(
amount,
payCoinSel,
);
}
export async function createDepositGroup(
@ -570,6 +566,7 @@ export async function createDepositGroup(
salt: wireSalt,
},
retryInfo: initRetryInfo(),
operationStatus: OperationStatus.Pending,
lastError: undefined,
};
@ -708,8 +705,10 @@ export async function getTotalFeeForDepositAmount(
.filter((x) =>
Amounts.isSameCurrency(x.value, pcs.coinContributions[i]),
);
const amountLeft = Amounts.sub(denom.value, pcs.coinContributions[i])
.amount;
const amountLeft = Amounts.sub(
denom.value,
pcs.coinContributions[i],
).amount;
const refreshCost = getTotalRefreshCost(allDenoms, denom, amountLeft);
refreshFee.push(refreshCost);
}
@ -736,8 +735,17 @@ export async function getTotalFeeForDepositAmount(
});
return {
coin: coinFee.length === 0 ? Amounts.getZero(total.currency) : Amounts.sum(coinFee).amount,
wire: wireFee.length === 0 ? Amounts.getZero(total.currency) : Amounts.sum(wireFee).amount,
refresh: refreshFee.length === 0 ? Amounts.getZero(total.currency) : Amounts.sum(refreshFee).amount
coin:
coinFee.length === 0
? Amounts.getZero(total.currency)
: Amounts.sum(coinFee).amount,
wire:
wireFee.length === 0
? Amounts.getZero(total.currency)
: Amounts.sum(wireFee).amount,
refresh:
refreshFee.length === 0
? Amounts.getZero(total.currency)
: Amounts.sum(refreshFee).amount,
};
}

View File

@ -28,6 +28,7 @@ import {
WalletStoresV1,
BackupProviderStateTag,
RefreshCoinStatus,
OperationStatus,
} from "../db.js";
import {
PendingOperationsResponse,
@ -37,6 +38,8 @@ import {
import {
getTimestampNow,
isTimestampExpired,
j2s,
Logger,
Timestamp,
} from "@gnu-taler/taler-util";
import { InternalWalletState } from "../common.js";
@ -82,33 +85,35 @@ async function gatherReservePending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.reserves.iter().forEach((reserve) => {
await tx.reserves.indexes.byStatus
.iter(OperationStatus.Pending)
.forEach((reserve) => {
const reserveType = reserve.bankInfo
? ReserveType.TalerBankWithdraw
: ReserveType.Manual;
switch (reserve.reserveStatus) {
case ReserveRecordStatus.DORMANT:
// nothing to report as pending
break;
case ReserveRecordStatus.WAIT_CONFIRM_BANK:
case ReserveRecordStatus.QUERYING_STATUS:
case ReserveRecordStatus.REGISTERING_BANK:
resp.pendingOperations.push({
type: PendingTaskType.Reserve,
givesLifeness: true,
timestampDue: reserve.retryInfo.nextRetry,
stage: reserve.reserveStatus,
timestampCreated: reserve.timestampCreated,
reserveType,
reservePub: reserve.reservePub,
retryInfo: reserve.retryInfo,
});
break;
default:
// FIXME: report problem!
break;
}
});
}
async function gatherRefreshPending(
@ -116,24 +121,26 @@ async function gatherRefreshPending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.refreshGroups.iter().forEach((r) => {
await tx.refreshGroups.indexes.byStatus
.iter(OperationStatus.Pending)
.forEach((r) => {
if (r.timestampFinished) {
return;
}
if (r.frozen) {
return;
}
resp.pendingOperations.push({
type: PendingTaskType.Refresh,
givesLifeness: true,
timestampDue: r.retryInfo.nextRetry,
refreshGroupId: r.refreshGroupId,
finishedPerCoin: r.statusPerCoin.map(
(x) => x === RefreshCoinStatus.Finished,
),
retryInfo: r.retryInfo,
});
});
}
async function gatherWithdrawalPending(
@ -144,29 +151,31 @@ async function gatherWithdrawalPending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.withdrawalGroups.iter().forEachAsync(async (wsr) => {
await tx.withdrawalGroups.indexes.byStatus
.iter(OperationStatus.Pending)
.forEachAsync(async (wsr) => {
if (wsr.timestampFinish) {
return;
}
let numCoinsWithdrawn = 0;
let numCoinsTotal = 0;
await tx.planchets.indexes.byGroup
.iter(wsr.withdrawalGroupId)
.forEach((x) => {
numCoinsTotal++;
if (x.withdrawalDone) {
numCoinsWithdrawn++;
}
});
resp.pendingOperations.push({
type: PendingTaskType.Withdraw,
givesLifeness: true,
timestampDue: wsr.retryInfo.nextRetry,
withdrawalGroupId: wsr.withdrawalGroupId,
lastError: wsr.lastError,
retryInfo: wsr.retryInfo,
});
});
}
async function gatherProposalPending(
@ -199,20 +208,22 @@ async function gatherDepositPending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.depositGroups.iter().forEach((dg) => {
await tx.depositGroups.indexes.byStatus
.iter(OperationStatus.Pending)
.forEach((dg) => {
if (dg.timestampFinished) {
return;
}
const timestampDue = dg.retryInfo?.nextRetry ?? getTimestampNow();
resp.pendingOperations.push({
type: PendingTaskType.Deposit,
givesLifeness: true,
timestampDue,
depositGroupId: dg.depositGroupId,
lastError: dg.lastError,
retryInfo: dg.retryInfo,
});
});
} }
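Each of the rewritten gather functions follows the same shape; condensed, the change is from a full table scan filtered in JavaScript to an index-backed scan that only visits pending records (the early-return checks on timestampFinished are still present in the new code):

// Before:
await tx.depositGroups.iter().forEach((dg) => {
  if (dg.timestampFinished) {
    return;
  }
  // ... enqueue pending work ...
});
// After:
await tx.depositGroups.indexes.byStatus
  .iter(OperationStatus.Pending)
  .forEach((dg) => {
    // ... enqueue pending work ...
  });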
async function gatherTipPending(

View File

@ -26,6 +26,7 @@ import {
CoinSourceType,
CoinStatus,
DenominationRecord,
OperationStatus,
RefreshCoinStatus,
RefreshGroupRecord,
WalletStoresV1,
@ -127,6 +128,7 @@ function updateGroupStatus(rg: RefreshGroupRecord): void {
rg.retryInfo = initRetryInfo();
} else {
rg.timestampFinished = getTimestampNow();
rg.operationStatus = OperationStatus.Finished;
rg.retryInfo = initRetryInfo();
}
}
@ -929,6 +931,7 @@ export async function createRefreshGroup(
}
const refreshGroup: RefreshGroupRecord = {
operationStatus: OperationStatus.Pending,
timestampFinished: undefined,
statusPerCoin: oldCoinPubs.map(() => RefreshCoinStatus.Pending),
lastError: undefined,

View File

@ -41,6 +41,7 @@ import {
} from "@gnu-taler/taler-util"; } from "@gnu-taler/taler-util";
import { InternalWalletState } from "../common.js"; import { InternalWalletState } from "../common.js";
import { import {
OperationStatus,
ReserveBankInfo, ReserveBankInfo,
ReserveRecord, ReserveRecord,
ReserveRecordStatus, ReserveRecordStatus,
@ -155,6 +156,7 @@ export async function createReserve(
lastError: undefined,
currency: req.amount.currency,
requestedQuery: false,
operationStatus: OperationStatus.Pending,
};
const exchangeInfo = await updateExchangeFromUrl(ws, req.exchange);
@ -250,6 +252,7 @@ export async function forceQueryReserve(
switch (reserve.reserveStatus) {
case ReserveRecordStatus.DORMANT:
reserve.reserveStatus = ReserveRecordStatus.QUERYING_STATUS;
reserve.operationStatus = OperationStatus.Pending;
break;
default:
reserve.requestedQuery = true;
@ -338,6 +341,7 @@ async function registerReserveWithBank(
}
r.timestampReserveInfoPosted = getTimestampNow();
r.reserveStatus = ReserveRecordStatus.WAIT_CONFIRM_BANK;
r.operationStatus = OperationStatus.Pending;
if (!r.bankInfo) {
throw Error("invariant failed");
}
@ -419,6 +423,7 @@ async function processReserveBankStatusImpl(
const now = getTimestampNow();
r.timestampBankConfirmed = now;
r.reserveStatus = ReserveRecordStatus.BANK_ABORTED;
r.operationStatus = OperationStatus.Finished;
r.retryInfo = initRetryInfo();
await tx.reserves.put(r);
});
@ -455,6 +460,7 @@ async function processReserveBankStatusImpl(
const now = getTimestampNow();
r.timestampBankConfirmed = now;
r.reserveStatus = ReserveRecordStatus.QUERYING_STATUS;
r.operationStatus = OperationStatus.Pending;
r.retryInfo = initRetryInfo();
} else {
switch (r.reserveStatus) {
@ -658,6 +664,7 @@ async function updateReserve(
if (denomSelInfo.selectedDenoms.length === 0) {
newReserve.reserveStatus = ReserveRecordStatus.DORMANT;
newReserve.operationStatus = OperationStatus.Finished;
newReserve.lastError = undefined;
newReserve.retryInfo = initRetryInfo();
await tx.reserves.put(newReserve);
@ -684,11 +691,13 @@ async function updateReserve(
denomsSel: denomSelectionInfoToState(denomSelInfo),
secretSeed: encodeCrock(getRandomBytes(64)),
denomSelUid: encodeCrock(getRandomBytes(32)),
operationStatus: OperationStatus.Pending,
};
newReserve.lastError = undefined;
newReserve.retryInfo = initRetryInfo();
newReserve.reserveStatus = ReserveRecordStatus.DORMANT;
newReserve.operationStatus = OperationStatus.Finished;
await tx.reserves.put(newReserve);
await tx.withdrawalGroups.put(withdrawalRecord);

View File

@ -53,6 +53,7 @@ import {
DenomSelectionState,
ExchangeDetailsRecord,
ExchangeRecord,
OperationStatus,
PlanchetRecord,
} from "../db.js";
import { walletCoreDebugFlags } from "../util/debugFlags.js";
@ -968,7 +969,8 @@ async function processWithdrawGroupImpl(
if (wg.timestampFinish === undefined && numFinished === numTotalCoins) {
finishedForFirstTime = true;
wg.timestampFinish = getTimestampNow();
wg.lastError = undefined;
wg.operationStatus = OperationStatus.Finished;
delete wg.lastError;
wg.retryInfo = initRetryInfo();
}
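The tail of the diff shows the completion side of the same pattern. A hypothetical helper (not part of the commit) makes the invariant explicit: whenever the finish timestamp is set, operationStatus flips to Finished so the record immediately drops out of byStatus queries for OperationStatus.Pending:

function markWithdrawalGroupFinished(wg: WithdrawalGroupRecord): void {
  // Record when the group finished and take it out of the pending index.
  wg.timestampFinish = getTimestampNow();
  wg.operationStatus = OperationStatus.Finished;
  wg.lastError = undefined;
  wg.retryInfo = initRetryInfo();
}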