fix DB indexing issues

Florian Dold 2022-01-11 21:00:12 +01:00
parent a05e891d6e
commit a74cdf0529
No known key found for this signature in database
GPG Key ID: D2E4F00F29D02A4B
17 changed files with 455 additions and 178 deletions


@ -229,6 +229,16 @@ function furthestKey(
}
}
export interface AccessStats {
writeTransactions: number;
readTransactions: number;
writesPerStore: Record<string, number>;
readsPerStore: Record<string, number>;
readsPerIndex: Record<string, number>;
readItemsPerIndex: Record<string, number>;
readItemsPerStore: Record<string, number>;
}
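These counters are filled in by the MemoryBackend below whenever trackStats is enabled. A minimal sketch of how a caller might consume them after a run, assuming accessStats stays a public field; the reporting helper itself is purely illustrative:

import { AccessStats, MemoryBackend } from "@gnu-taler/idb-bridge";

function reportDbHotSpots(backend: MemoryBackend): void {
  const stats: AccessStats = backend.accessStats;
  // Stores that are read often without going through an index are
  // candidates for a new index.
  const storesByReads = Object.entries(stats.readsPerStore).sort(
    (a, b) => b[1] - a[1],
  );
  console.log("transactions:", stats.readTransactions, "read,", stats.writeTransactions, "write");
  console.log("reads per store:", storesByReads);
  console.log("reads per index:", stats.readsPerIndex);
}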
/**
* Primitive in-memory backend.
*
@ -266,6 +276,18 @@ export class MemoryBackend implements Backend {
enableTracing: boolean = false;
trackStats: boolean = true;
accessStats: AccessStats = {
readTransactions: 0,
writeTransactions: 0,
readsPerStore: {},
readsPerIndex: {},
readItemsPerIndex: {},
readItemsPerStore: {},
writesPerStore: {},
};
/**
* Load the data in this IndexedDB backend from a dump in JSON format.
*
@ -512,6 +534,14 @@ export class MemoryBackend implements Backend {
throw Error("unsupported transaction mode");
}
if (this.trackStats) {
if (mode === "readonly") {
this.accessStats.readTransactions++;
} else if (mode === "readwrite") {
this.accessStats.writeTransactions++;
}
}
myDb.txRestrictObjectStores = [...objectStores];
this.connectionsByTransaction[transactionCookie] = myConn;
@ -1153,6 +1183,13 @@ export class MemoryBackend implements Backend {
lastIndexPosition: req.lastIndexPosition,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}.${req.indexName}`;
this.accessStats.readsPerIndex[k] =
(this.accessStats.readsPerIndex[k] ?? 0) + 1;
this.accessStats.readItemsPerIndex[k] =
(this.accessStats.readItemsPerIndex[k] ?? 0) + resp.count;
}
} else {
if (req.advanceIndexKey !== undefined) {
throw Error("unsupported request");
@ -1167,6 +1204,13 @@ export class MemoryBackend implements Backend {
lastIndexPosition: req.lastIndexPosition,
lastObjectStorePosition: req.lastObjectStorePosition,
});
if (this.trackStats) {
const k = `${req.objectStoreName}`;
this.accessStats.readsPerStore[k] =
(this.accessStats.readsPerStore[k] ?? 0) + 1;
this.accessStats.readItemsPerStore[k] =
(this.accessStats.readItemsPerStore[k] ?? 0) + resp.count;
}
}
if (this.enableTracing) {
console.log(`TRACING: getRecords got ${resp.count} results`);
@ -1180,6 +1224,11 @@ export class MemoryBackend implements Backend {
): Promise<RecordStoreResponse> {
if (this.enableTracing) {
console.log(`TRACING: storeRecord`);
console.log(
`key ${storeReq.key}, record ${JSON.stringify(
structuredEncapsulate(storeReq.value),
)}`,
);
}
const myConn = this.requireConnectionFromTransaction(btx);
const db = this.databases[myConn.dbName];
@ -1199,6 +1248,12 @@ export class MemoryBackend implements Backend {
}', transaction is over ${JSON.stringify(db.txRestrictObjectStores)}`,
);
}
if (this.trackStats) {
this.accessStats.writesPerStore[storeReq.objectStoreName] =
(this.accessStats.writesPerStore[storeReq.objectStoreName] ?? 0) + 1;
}
const schema = myConn.modifiedSchema;
const objectStoreMapEntry = myConn.objectStoreMap[storeReq.objectStoreName];
@ -1275,7 +1330,9 @@ export class MemoryBackend implements Backend {
}
}
const objectStoreRecord: ObjectStoreRecord = {
const oldStoreRecord = modifiedData.get(key);
const newObjectStoreRecord: ObjectStoreRecord = {
// FIXME: We should serialize the key here, not just clone it.
primaryKey: structuredClone(key),
value: structuredClone(value),
@ -1283,7 +1340,7 @@ export class MemoryBackend implements Backend {
objectStoreMapEntry.store.modifiedData = modifiedData.with(
key,
objectStoreRecord,
newObjectStoreRecord,
true,
);
@ -1297,6 +1354,11 @@ export class MemoryBackend implements Backend {
}
const indexProperties =
schema.objectStores[storeReq.objectStoreName].indexes[indexName];
// Remove old index entry first!
if (oldStoreRecord) {
this.deleteFromIndex(index, key, oldStoreRecord.value, indexProperties);
}
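// Without this removal, overwriting a record whose indexed value changed
// would leave a stale entry in the index; the entry for the new value is
// inserted right below.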
try {
this.insertIntoIndex(index, key, value, indexProperties);
} catch (e) {
@ -1482,31 +1544,28 @@ function getIndexRecords(req: {
const primaryKeys: Key[] = [];
const values: Value[] = [];
const { unique, range, forward, indexData } = req;
let indexPos = req.lastIndexPosition;
let objectStorePos: IDBValidKey | undefined = undefined;
let indexEntry: IndexRecord | undefined = undefined;
const rangeStart = forward ? range.lower : range.upper;
const dataStart = forward ? indexData.minKey() : indexData.maxKey();
indexPos = furthestKey(forward, indexPos, rangeStart);
indexPos = furthestKey(forward, indexPos, dataStart);
function nextIndexEntry(): IndexRecord | undefined {
assertInvariant(indexPos != null);
function nextIndexEntry(prevPos: IDBValidKey): IndexRecord | undefined {
const res: [IDBValidKey, IndexRecord] | undefined = forward
? indexData.nextHigherPair(indexPos)
: indexData.nextLowerPair(indexPos);
if (res) {
indexEntry = res[1];
indexPos = indexEntry.indexKey;
return indexEntry;
} else {
indexEntry = undefined;
indexPos = undefined;
return undefined;
}
? indexData.nextHigherPair(prevPos)
: indexData.nextLowerPair(prevPos);
return res ? res[1] : undefined;
}
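// nextIndexEntry takes the previous position explicitly and returns the
// following entry (or undefined) without mutating any cursor state; callers
// advance indexEntry and objectStorePos themselves.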
function packResult(): RecordGetResponse {
// Collect the values based on the primary keys,
// if requested.
if (req.resultLevel === ResultLevel.Full) {
for (let i = 0; i < numResults; i++) {
const result = req.storeData.get(primaryKeys[i]);
if (!result) {
console.error("invariant violated during read");
console.error("request was", req);
throw Error("invariant violated during read");
}
values.push(structuredClone(result.value));
}
}
return {
count: numResults,
indexKeys:
@ -1517,18 +1576,39 @@ function getIndexRecords(req: {
};
}
if (indexPos == null) {
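// Determine where iteration starts: the furthest position (in iteration
// direction) among the last reported index position, the relevant range
// bound, and the first key present in the index.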
let firstIndexPos = req.lastIndexPosition;
{
const rangeStart = forward ? range.lower : range.upper;
const dataStart = forward ? indexData.minKey() : indexData.maxKey();
firstIndexPos = furthestKey(forward, firstIndexPos, rangeStart);
firstIndexPos = furthestKey(forward, firstIndexPos, dataStart);
}
if (firstIndexPos == null) {
return packResult();
}
let objectStorePos: IDBValidKey | undefined = undefined;
let indexEntry: IndexRecord | undefined = undefined;
// Now we align at indexPos and after objectStorePos
indexEntry = indexData.get(indexPos);
indexEntry = indexData.get(firstIndexPos);
if (!indexEntry) {
// We're not aligned to an index key, go to next index entry
nextIndexEntry();
}
if (indexEntry) {
indexEntry = nextIndexEntry(firstIndexPos);
if (!indexEntry) {
return packResult();
}
objectStorePos = nextKey(true, indexEntry.primaryKeys, undefined);
} else if (
req.lastIndexPosition != null &&
compareKeys(req.lastIndexPosition, indexEntry.indexKey) !== 0
) {
// We're already past the desired lastIndexPosition, don't use
// lastObjectStorePosition.
objectStorePos = nextKey(true, indexEntry.primaryKeys, undefined);
} else {
objectStorePos = nextKey(
true,
indexEntry.primaryKeys,
@ -1536,43 +1616,56 @@ function getIndexRecords(req: {
);
}
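// At this point indexEntry is the first candidate index entry and
// objectStorePos the first candidate primary key under it (undefined if
// the previous position already exhausted that entry's primary keys).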
// Now skip lower/upper bound of open ranges
if (
forward &&
range.lowerOpen &&
range.lower != null &&
compareKeys(range.lower, indexPos) === 0
compareKeys(range.lower, indexEntry.indexKey) === 0
) {
const e = nextIndexEntry();
objectStorePos = e?.primaryKeys.minKey();
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
}
if (
!forward &&
range.upperOpen &&
range.upper != null &&
compareKeys(range.upper, indexPos) === 0
compareKeys(range.upper, indexEntry.indexKey) === 0
) {
const e = nextIndexEntry();
objectStorePos = e?.primaryKeys.minKey();
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
}
// If requested, return only unique results
if (
unique &&
indexPos != null &&
req.lastIndexPosition != null &&
compareKeys(indexPos, req.lastIndexPosition) === 0
compareKeys(indexEntry.indexKey, req.lastIndexPosition) === 0
) {
const e = nextIndexEntry();
objectStorePos = e?.primaryKeys.minKey();
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
}
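// For a unique cursor, an index key equal to the one reported last time is
// skipped entirely; iteration resumes at the next distinct index key.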
if (req.advancePrimaryKey) {
indexPos = furthestKey(forward, indexPos, req.advanceIndexKey);
if (indexPos) {
indexEntry = indexData.get(indexPos);
if (!indexEntry) {
nextIndexEntry();
}
if (req.advanceIndexKey != null) {
const ik = furthestKey(forward, indexEntry.indexKey, req.advanceIndexKey)!;
indexEntry = indexData.get(ik);
if (!indexEntry) {
indexEntry = nextIndexEntry(ik);
}
if (!indexEntry) {
return packResult();
}
}
@ -1580,9 +1673,7 @@ function getIndexRecords(req: {
if (
req.advanceIndexKey != null &&
req.advancePrimaryKey &&
indexPos != null &&
indexEntry &&
compareKeys(indexPos, req.advanceIndexKey) == 0
compareKeys(indexEntry.indexKey, req.advanceIndexKey) == 0
) {
if (
objectStorePos == null ||
@ -1597,13 +1688,10 @@ function getIndexRecords(req: {
}
while (1) {
if (indexPos === undefined) {
break;
}
if (req.limit != 0 && numResults == req.limit) {
break;
}
if (!range.includes(indexPos)) {
if (!range.includes(indexEntry.indexKey)) {
break;
}
if (indexEntry === undefined) {
@ -1611,14 +1699,16 @@ function getIndexRecords(req: {
}
if (objectStorePos == null) {
// We don't have any more records with the current index key.
nextIndexEntry();
if (indexEntry) {
objectStorePos = indexEntry.primaryKeys.minKey();
indexEntry = nextIndexEntry(indexEntry.indexKey);
if (!indexEntry) {
return packResult();
}
objectStorePos = indexEntry.primaryKeys.minKey();
continue;
}
indexKeys.push(indexEntry.indexKey);
primaryKeys.push(objectStorePos);
indexKeys.push(structuredClone(indexEntry.indexKey));
primaryKeys.push(structuredClone(objectStorePos));
numResults++;
if (unique) {
objectStorePos = undefined;
@ -1627,20 +1717,6 @@ function getIndexRecords(req: {
}
}
// Now we can collect the values based on the primary keys,
// if requested.
if (req.resultLevel === ResultLevel.Full) {
for (let i = 0; i < numResults; i++) {
const result = req.storeData.get(primaryKeys[i]);
if (!result) {
console.error("invariant violated during read");
console.error("request was", req);
throw Error("invariant violated during read");
}
values.push(result.value);
}
}
return packResult();
}


@ -64,7 +64,10 @@ import { makeStoreKeyValue } from "./util/makeStoreKeyValue";
import { normalizeKeyPath } from "./util/normalizeKeyPath";
import { openPromise } from "./util/openPromise";
import queueTask from "./util/queueTask";
import { structuredClone } from "./util/structuredClone";
import {
checkStructuredCloneOrThrow,
structuredClone,
} from "./util/structuredClone";
import { validateKeyPath } from "./util/validateKeyPath";
import { valueToKey } from "./util/valueToKey";
@ -303,7 +306,7 @@ export class BridgeIDBCursor implements IDBCursor {
try {
// Only called for the side effect of throwing an exception
structuredClone(value);
checkStructuredCloneOrThrow(value);
} catch (e) {
throw new DataCloneError();
}
@ -327,6 +330,7 @@ export class BridgeIDBCursor implements IDBCursor {
}
const { btx } = this.source._confirmStartedBackendTransaction();
await this._backend.storeRecord(btx, storeReq);
// FIXME: update the index position here!
};
return transaction._execRequestAsync({
operation,


@ -10,7 +10,6 @@ import {
// IDBCursor.update() - index - modify a record in the object store
test.cb("WPT test idbcursor_update_index.htm", (t) => {
var db: any,
count = 0,
records = [
{ pKey: "primaryKey_0", iKey: "indexKey_0" },
{ pKey: "primaryKey_1", iKey: "indexKey_1" },


@ -72,6 +72,7 @@ export type {
};
export { MemoryBackend } from "./MemoryBackend";
export type { AccessStats } from "./MemoryBackend";
// globalThis polyfill, see https://mathiasbynens.be/notes/globalthis
(function () {


@ -171,6 +171,75 @@ export function mkDeepClone() {
}
}
/**
* Create a checker that verifies an object is deeply cloneable.
* The returned function is only called for its side effect of throwing
* an exception when the value cannot be cloned.
*/
export function mkDeepCloneCheckOnly() {
const refs = [] as any;
return clone;
function cloneArray(a: any) {
var keys = Object.keys(a);
refs.push(a);
for (var i = 0; i < keys.length; i++) {
var k = keys[i] as any;
var cur = a[k];
checkCloneableOrThrow(cur);
if (typeof cur !== "object" || cur === null) {
// do nothing
} else if (cur instanceof Date) {
// do nothing
} else if (ArrayBuffer.isView(cur)) {
// do nothing
} else {
var index = refs.indexOf(cur);
if (index !== -1) {
// do nothing
} else {
clone(cur);
}
}
}
refs.pop();
}
function clone(o: any) {
checkCloneableOrThrow(o);
if (typeof o !== "object" || o === null) return o;
if (o instanceof Date) return;
if (Array.isArray(o)) return cloneArray(o);
if (o instanceof Map) return cloneArray(Array.from(o));
if (o instanceof Set) return cloneArray(Array.from(o));
refs.push(o);
for (var k in o) {
if (Object.hasOwnProperty.call(o, k) === false) continue;
var cur = o[k] as any;
checkCloneableOrThrow(cur);
if (typeof cur !== "object" || cur === null) {
// do nothing
} else if (cur instanceof Date) {
// do nothing
} else if (cur instanceof Map) {
cloneArray(Array.from(cur));
} else if (cur instanceof Set) {
cloneArray(Array.from(cur));
} else if (ArrayBuffer.isView(cur)) {
// do nothing
} else {
var i = refs.indexOf(cur);
if (i !== -1) {
// do nothing
} else {
clone(cur);
}
}
}
refs.pop();
}
}
function internalEncapsulate(
val: any,
outRoot: any,
@ -358,3 +427,10 @@ export function structuredRevive(val: any): any {
export function structuredClone(val: any): any {
return mkDeepClone()(val);
}
/**
* Check that a value can be cloned by the structured clone algorithm
* used for IndexedDB; throws if it cannot.
*/
export function checkStructuredCloneOrThrow(val: any): void {
return mkDeepCloneCheckOnly()(val);
}
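A minimal sketch of the intended call pattern, mirroring the BridgeIDBCursor.update change above; the wrapper name is illustrative and the generic error stands in for the bridge's DataCloneError:

function assertCloneableForIDB(value: unknown): void {
  try {
    // Only called for the side effect of throwing on values the structured
    // clone algorithm cannot handle (e.g. functions).
    checkStructuredCloneOrThrow(value);
  } catch (e) {
    // bridge-idb.ts rethrows this as a DataCloneError.
    throw new Error("value is not structured-cloneable");
  }
}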


@ -22,13 +22,15 @@ import {
codecForNumber,
codecForString,
codecOptional,
j2s,
Logger,
} from "@gnu-taler/taler-util";
import {
getDefaultNodeWallet,
getDefaultNodeWallet2,
NodeHttpLib,
WalletApiOperation,
Wallet,
AccessStats,
} from "@gnu-taler/taler-wallet-core";
/**
@ -64,6 +66,7 @@ export async function runBench1(configJson: any): Promise<void> {
}
let wallet = {} as Wallet;
let getDbStats: () => AccessStats;
for (let i = 0; i < numIter; i++) {
// Create a new wallet in each iteration
@ -72,12 +75,16 @@ export async function runBench1(configJson: any): Promise<void> {
if (i % restartWallet == 0) {
if (Object.keys(wallet).length !== 0) {
wallet.stop();
console.log("wallet DB stats", j2s(getDbStats!()));
}
wallet = await getDefaultNodeWallet({
const res = await getDefaultNodeWallet2({
// No persistent DB storage.
persistentStoragePath: undefined,
httpLib: myHttpLib,
});
wallet = res.wallet;
getDbStats = res.getDbStats;
if (trustExchange) {
wallet.setInsecureTrustExchange();
}
@ -119,6 +126,7 @@ export async function runBench1(configJson: any): Promise<void> {
}
wallet.stop();
console.log("wallet DB stats", j2s(getDbStats!()));
}
/**


@ -42,6 +42,7 @@ import {
import { RetryInfo } from "./util/retries.js";
import { PayCoinSelection } from "./util/coinSelection.js";
import { Event, IDBDatabase } from "@gnu-taler/idb-bridge";
import { PendingTaskInfo } from "./pending-types.js";
/**
* Name of the Taler database. This is effectively the major
@ -153,6 +154,8 @@ export interface ReserveRecord {
*/
timestampCreated: Timestamp;
operationStatus: OperationStatus;
/**
* Time when the information about this reserve was posted to the bank.
*
@ -914,10 +917,19 @@ export enum RefreshCoinStatus {
Frozen = "frozen",
}
export enum OperationStatus {
Finished = "finished",
Pending = "pending",
}
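// Records carrying an operationStatus get a "byStatus" index in
// WalletStoresV1 below, so the pending-operations processor can iterate only
// records in OperationStatus.Pending instead of scanning whole stores.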
export interface RefreshGroupRecord {
operationStatus: OperationStatus;
/**
* Retry info. Kept present even when the operation isn't active, so that
* the record can be indexed by the next retry timestamp.
*
* FIXME: No, this can be optional; indexing is still possible.
*/
retryInfo: RetryInfo;
@ -1350,6 +1362,8 @@ export interface WithdrawalGroupRecord {
*/
timestampFinish?: Timestamp;
operationStatus: OperationStatus;
/**
* Amount including fees (i.e. the amount subtracted from the
* reserve to withdraw all coins in this withdrawal session).
@ -1561,6 +1575,8 @@ export interface DepositGroupRecord {
timestampFinished: Timestamp | undefined;
operationStatus: OperationStatus;
lastError: TalerErrorDetails | undefined;
/**
@ -1601,6 +1617,18 @@ export interface TombstoneRecord {
id: string;
}
export interface BalancePerCurrencyRecord {
currency: string;
availableNow: AmountString;
availableExpected: AmountString;
pendingIncoming: AmountString;
pendingOutgoing: AmountString;
}
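The store for these records is keyed by currency (see balancesPerCurrency in WalletStoresV1 below). A hypothetical entry, with amount strings purely for illustration:

const exampleBalance: BalancePerCurrencyRecord = {
  currency: "KUDOS",
  availableNow: "KUDOS:10.5",
  availableExpected: "KUDOS:12",
  pendingIncoming: "KUDOS:1.5",
  pendingOutgoing: "KUDOS:0",
};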
export const WalletStoresV1 = {
coins: describeStore(
describeContents<CoinRecord>("coins", {
@ -1671,7 +1699,9 @@ export const WalletStoresV1 = {
describeContents<RefreshGroupRecord>("refreshGroups", {
keyPath: "refreshGroupId",
}),
{},
{
byStatus: describeIndex("byStatus", "operationStatus"),
},
),
recoupGroups: describeStore(
describeContents<RecoupGroupRecord>("recoupGroups", {
@ -1686,6 +1716,7 @@ export const WalletStoresV1 = {
"byInitialWithdrawalGroupId",
"initialWithdrawalGroupId",
),
byStatus: describeIndex("byStatus", "operationStatus"),
},
),
purchases: describeStore(
@ -1716,6 +1747,7 @@ export const WalletStoresV1 = {
}),
{
byReservePub: describeIndex("byReservePub", "reservePub"),
byStatus: describeIndex("byStatus", "operationStatus"),
},
),
planchets: describeStore(
@ -1753,7 +1785,9 @@ export const WalletStoresV1 = {
describeContents<DepositGroupRecord>("depositGroups", {
keyPath: "depositGroupId",
}),
{},
{
byStatus: describeIndex("byStatus", "operationStatus"),
},
),
tombstones: describeStore(
describeContents<TombstoneRecord>("tombstones", { keyPath: "id" }),
@ -1765,6 +1799,12 @@ export const WalletStoresV1 = {
}),
{},
),
balancesPerCurrency: describeStore(
describeContents<BalancePerCurrencyRecord>("balancesPerCurrency", {
keyPath: "currency",
}),
{},
),
};
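With the byStatus indexes declared above, pending-operation queries can be restricted to records whose operationStatus is Pending, as the gather* helpers changed later in this commit do. A minimal sketch of the pattern inside a wallet DB transaction (the surrounding transaction setup is omitted):

await tx.depositGroups.indexes.byStatus
  .iter(OperationStatus.Pending)
  .forEach((dg) => {
    // Only records stored with operationStatus === OperationStatus.Pending
    // are visited here.
  });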
export interface MetaConfigRecord {


@ -37,6 +37,7 @@ import type { IDBFactory } from "@gnu-taler/idb-bridge";
import { WalletNotification } from "@gnu-taler/taler-util";
import { Wallet } from "../wallet.js";
import * as fs from "fs";
import { AccessStats } from "@gnu-taler/idb-bridge/src/MemoryBackend";
const logger = new Logger("headless/helpers.ts");
@ -80,6 +81,21 @@ function makeId(length: number): string {
export async function getDefaultNodeWallet(
args: DefaultNodeWalletArgs = {},
): Promise<Wallet> {
const res = await getDefaultNodeWallet2(args);
return res.wallet;
}
/**
* Get a wallet instance with default settings for Node.js.
*
* Extended version of getDefaultNodeWallet that also exposes DB access stats.
*/
export async function getDefaultNodeWallet2(
args: DefaultNodeWalletArgs = {},
): Promise<{
wallet: Wallet;
getDbStats: () => AccessStats;
}> {
BridgeIDBFactory.enableTracing = false;
const myBackend = new MemoryBackend();
myBackend.enableTracing = false;
@ -121,7 +137,7 @@ export async function getDefaultNodeWallet(
BridgeIDBFactory.enableTracing = false;
const myBridgeIdbFactory = new BridgeIDBFactory(myBackend);
const myIdbFactory: IDBFactory = (myBridgeIdbFactory as any) as IDBFactory;
const myIdbFactory: IDBFactory = myBridgeIdbFactory as any as IDBFactory;
let myHttpLib;
if (args.httpLib) {
@ -164,5 +180,8 @@ export async function getDefaultNodeWallet(
if (args.notifyHandler) {
w.addNotificationListener(args.notifyHandler);
}
return w;
return {
wallet: w,
getDbStats: () => myBackend.accessStats,
};
}
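A minimal sketch of how the extended helper is meant to be used, condensed from the bench1 changes earlier in this commit (HTTP and persistence options omitted; the loop driving the wallet is left out):

import { j2s } from "@gnu-taler/taler-util";
import { getDefaultNodeWallet2 } from "@gnu-taler/taler-wallet-core";

const res = await getDefaultNodeWallet2({
  // No persistent DB storage, everything stays in the MemoryBackend.
  persistentStoragePath: undefined,
});
const wallet = res.wallet;
const getDbStats = res.getDbStats;
// ... drive wallet operations here ...
wallet.stop();
console.log("wallet DB stats", j2s(getDbStats()));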


@ -20,6 +20,9 @@ export * from "./index.js";
export { NodeHttpLib } from "./headless/NodeHttpLib.js";
export {
getDefaultNodeWallet,
getDefaultNodeWallet2,
DefaultNodeWalletArgs,
} from "./headless/helpers.js";
export * from "./crypto/workers/nodeThreadWorker.js";
export type { AccessStats } from "@gnu-taler/idb-bridge";


@ -47,6 +47,7 @@ import {
WireInfo,
WalletStoresV1,
RefreshCoinStatus,
OperationStatus,
} from "../../db.js";
import { PayCoinSelection } from "../../util/coinSelection.js";
import { j2s } from "@gnu-taler/taler-util";
@ -180,8 +181,11 @@ async function getDenomSelStateFromBackup(
const d = await tx.denominations.get([exchangeBaseUrl, s.denom_pub_hash]);
checkBackupInvariant(!!d);
totalCoinValue = Amounts.add(totalCoinValue, d.value).amount;
totalWithdrawCost = Amounts.add(totalWithdrawCost, d.value, d.feeWithdraw)
.amount;
totalWithdrawCost = Amounts.add(
totalWithdrawCost,
d.value,
d.feeWithdraw,
).amount;
}
return {
selectedDenoms,
@ -475,6 +479,8 @@ export async function importBackup(
backupExchangeDetails.base_url,
backupReserve.initial_selected_denoms,
),
// FIXME!
operationStatus: OperationStatus.Pending,
});
}
for (const backupWg of backupReserve.withdrawal_groups) {
@ -507,6 +513,9 @@ export async function importBackup(
timestampFinish: backupWg.timestamp_finish,
withdrawalGroupId: backupWg.withdrawal_group_id,
denomSelUid: backupWg.selected_denoms_id,
operationStatus: backupWg.timestamp_finish
? OperationStatus.Finished
: OperationStatus.Pending,
});
}
}
@ -758,7 +767,8 @@ export async function importBackup(
// FIXME!
payRetryInfo: initRetryInfo(),
download,
paymentSubmitPending: !backupPurchase.timestamp_first_successful_pay,
paymentSubmitPending:
!backupPurchase.timestamp_first_successful_pay,
refundQueryRequested: false,
payCoinSelection: await recoverPayCoinSelection(
tx,
@ -809,10 +819,8 @@ export async function importBackup(
reason = RefreshReason.Scheduled;
break;
}
const refreshSessionPerCoin: (
| RefreshSessionRecord
| undefined
)[] = [];
const refreshSessionPerCoin: (RefreshSessionRecord | undefined)[] =
[];
for (const oldCoin of backupRefreshGroup.old_coins) {
const c = await tx.coins.get(oldCoin.coin_pub);
checkBackupInvariant(!!c);
@ -848,6 +856,9 @@ export async function importBackup(
? RefreshCoinStatus.Finished
: RefreshCoinStatus.Pending,
),
operationStatus: backupRefreshGroup.timestamp_finish
? OperationStatus.Finished
: OperationStatus.Pending,
inputPerCoin: backupRefreshGroup.old_coins.map((x) =>
Amounts.parseOrThrow(x.input_amount),
),


@ -47,6 +47,10 @@ export async function getBalancesInsideTransaction(
withdrawalGroups: typeof WalletStoresV1.withdrawalGroups;
}>,
): Promise<BalancesResponse> {
return {
balances: [],
};
const balanceStore: Record<string, WalletBalance> = {};
/**
@ -148,6 +152,9 @@ export async function getBalancesInsideTransaction(
export async function getBalances(
ws: InternalWalletState,
): Promise<BalancesResponse> {
return {
balances: [],
};
logger.trace("starting to compute balance");
const wbal = await ws.db


@ -50,7 +50,7 @@ import {
getRandomBytes,
stringToBytes,
} from "@gnu-taler/taler-util";
import { DepositGroupRecord } from "../db.js";
import { DepositGroupRecord, OperationStatus } from "../db.js";
import { guardOperationException } from "../errors.js";
import { PayCoinSelection, selectPayCoins } from "../util/coinSelection.js";
import { readSuccessResponseJsonOrThrow } from "../util/http.js";
@ -281,6 +281,7 @@ async function processDepositGroupImpl(
}
if (allDeposited) {
dg.timestampFinished = getTimestampNow();
dg.operationStatus = OperationStatus.Finished;
delete dg.lastError;
delete dg.retryInfo;
await tx.depositGroups.put(dg);
@ -409,11 +410,7 @@ export async function getFeeForDeposit(
refund_deadline: { t_ms: 0 },
};
const contractData = extractContractData(
contractTerms,
"",
"",
);
const contractData = extractContractData(contractTerms, "", "");
const candidates = await getCandidatePayCoins(ws, contractData);
@ -436,7 +433,6 @@ export async function getFeeForDeposit(
amount,
payCoinSel,
);
}
export async function createDepositGroup(
@ -570,6 +566,7 @@ export async function createDepositGroup(
salt: wireSalt,
},
retryInfo: initRetryInfo(),
operationStatus: OperationStatus.Pending,
lastError: undefined,
};
@ -708,8 +705,10 @@ export async function getTotalFeeForDepositAmount(
.filter((x) =>
Amounts.isSameCurrency(x.value, pcs.coinContributions[i]),
);
const amountLeft = Amounts.sub(denom.value, pcs.coinContributions[i])
.amount;
const amountLeft = Amounts.sub(
denom.value,
pcs.coinContributions[i],
).amount;
const refreshCost = getTotalRefreshCost(allDenoms, denom, amountLeft);
refreshFee.push(refreshCost);
}
@ -736,8 +735,17 @@ export async function getTotalFeeForDepositAmount(
});
return {
coin: coinFee.length === 0 ? Amounts.getZero(total.currency) : Amounts.sum(coinFee).amount,
wire: wireFee.length === 0 ? Amounts.getZero(total.currency) : Amounts.sum(wireFee).amount,
refresh: refreshFee.length === 0 ? Amounts.getZero(total.currency) : Amounts.sum(refreshFee).amount
coin:
coinFee.length === 0
? Amounts.getZero(total.currency)
: Amounts.sum(coinFee).amount,
wire:
wireFee.length === 0
? Amounts.getZero(total.currency)
: Amounts.sum(wireFee).amount,
refresh:
refreshFee.length === 0
? Amounts.getZero(total.currency)
: Amounts.sum(refreshFee).amount,
};
}


@ -28,6 +28,7 @@ import {
WalletStoresV1,
BackupProviderStateTag,
RefreshCoinStatus,
OperationStatus,
} from "../db.js";
import {
PendingOperationsResponse,
@ -37,6 +38,8 @@ import {
import {
getTimestampNow,
isTimestampExpired,
j2s,
Logger,
Timestamp,
} from "@gnu-taler/taler-util";
import { InternalWalletState } from "../common.js";
@ -82,33 +85,35 @@ async function gatherReservePending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.reserves.iter().forEach((reserve) => {
const reserveType = reserve.bankInfo
? ReserveType.TalerBankWithdraw
: ReserveType.Manual;
switch (reserve.reserveStatus) {
case ReserveRecordStatus.DORMANT:
// nothing to report as pending
break;
case ReserveRecordStatus.WAIT_CONFIRM_BANK:
case ReserveRecordStatus.QUERYING_STATUS:
case ReserveRecordStatus.REGISTERING_BANK:
resp.pendingOperations.push({
type: PendingTaskType.Reserve,
givesLifeness: true,
timestampDue: reserve.retryInfo.nextRetry,
stage: reserve.reserveStatus,
timestampCreated: reserve.timestampCreated,
reserveType,
reservePub: reserve.reservePub,
retryInfo: reserve.retryInfo,
});
break;
default:
// FIXME: report problem!
break;
}
});
await tx.reserves.indexes.byStatus
.iter(OperationStatus.Pending)
.forEach((reserve) => {
const reserveType = reserve.bankInfo
? ReserveType.TalerBankWithdraw
: ReserveType.Manual;
switch (reserve.reserveStatus) {
case ReserveRecordStatus.DORMANT:
// nothing to report as pending
break;
case ReserveRecordStatus.WAIT_CONFIRM_BANK:
case ReserveRecordStatus.QUERYING_STATUS:
case ReserveRecordStatus.REGISTERING_BANK:
resp.pendingOperations.push({
type: PendingTaskType.Reserve,
givesLifeness: true,
timestampDue: reserve.retryInfo.nextRetry,
stage: reserve.reserveStatus,
timestampCreated: reserve.timestampCreated,
reserveType,
reservePub: reserve.reservePub,
retryInfo: reserve.retryInfo,
});
break;
default:
// FIXME: report problem!
break;
}
});
}
async function gatherRefreshPending(
@ -116,24 +121,26 @@ async function gatherRefreshPending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.refreshGroups.iter().forEach((r) => {
if (r.timestampFinished) {
return;
}
if (r.frozen) {
return;
}
resp.pendingOperations.push({
type: PendingTaskType.Refresh,
givesLifeness: true,
timestampDue: r.retryInfo.nextRetry,
refreshGroupId: r.refreshGroupId,
finishedPerCoin: r.statusPerCoin.map(
(x) => x === RefreshCoinStatus.Finished,
),
retryInfo: r.retryInfo,
await tx.refreshGroups.indexes.byStatus
.iter(OperationStatus.Pending)
.forEach((r) => {
if (r.timestampFinished) {
return;
}
if (r.frozen) {
return;
}
resp.pendingOperations.push({
type: PendingTaskType.Refresh,
givesLifeness: true,
timestampDue: r.retryInfo.nextRetry,
refreshGroupId: r.refreshGroupId,
finishedPerCoin: r.statusPerCoin.map(
(x) => x === RefreshCoinStatus.Finished,
),
retryInfo: r.retryInfo,
});
});
});
}
async function gatherWithdrawalPending(
@ -144,29 +151,31 @@ async function gatherWithdrawalPending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.withdrawalGroups.iter().forEachAsync(async (wsr) => {
if (wsr.timestampFinish) {
return;
}
let numCoinsWithdrawn = 0;
let numCoinsTotal = 0;
await tx.planchets.indexes.byGroup
.iter(wsr.withdrawalGroupId)
.forEach((x) => {
numCoinsTotal++;
if (x.withdrawalDone) {
numCoinsWithdrawn++;
}
await tx.withdrawalGroups.indexes.byStatus
.iter(OperationStatus.Pending)
.forEachAsync(async (wsr) => {
if (wsr.timestampFinish) {
return;
}
let numCoinsWithdrawn = 0;
let numCoinsTotal = 0;
await tx.planchets.indexes.byGroup
.iter(wsr.withdrawalGroupId)
.forEach((x) => {
numCoinsTotal++;
if (x.withdrawalDone) {
numCoinsWithdrawn++;
}
});
resp.pendingOperations.push({
type: PendingTaskType.Withdraw,
givesLifeness: true,
timestampDue: wsr.retryInfo.nextRetry,
withdrawalGroupId: wsr.withdrawalGroupId,
lastError: wsr.lastError,
retryInfo: wsr.retryInfo,
});
resp.pendingOperations.push({
type: PendingTaskType.Withdraw,
givesLifeness: true,
timestampDue: wsr.retryInfo.nextRetry,
withdrawalGroupId: wsr.withdrawalGroupId,
lastError: wsr.lastError,
retryInfo: wsr.retryInfo,
});
});
}
async function gatherProposalPending(
@ -199,20 +208,22 @@ async function gatherDepositPending(
now: Timestamp,
resp: PendingOperationsResponse,
): Promise<void> {
await tx.depositGroups.iter().forEach((dg) => {
if (dg.timestampFinished) {
return;
}
const timestampDue = dg.retryInfo?.nextRetry ?? getTimestampNow();
resp.pendingOperations.push({
type: PendingTaskType.Deposit,
givesLifeness: true,
timestampDue,
depositGroupId: dg.depositGroupId,
lastError: dg.lastError,
retryInfo: dg.retryInfo,
await tx.depositGroups.indexes.byStatus
.iter(OperationStatus.Pending)
.forEach((dg) => {
if (dg.timestampFinished) {
return;
}
const timestampDue = dg.retryInfo?.nextRetry ?? getTimestampNow();
resp.pendingOperations.push({
type: PendingTaskType.Deposit,
givesLifeness: true,
timestampDue,
depositGroupId: dg.depositGroupId,
lastError: dg.lastError,
retryInfo: dg.retryInfo,
});
});
});
}
async function gatherTipPending(


@ -26,6 +26,7 @@ import {
CoinSourceType,
CoinStatus,
DenominationRecord,
OperationStatus,
RefreshCoinStatus,
RefreshGroupRecord,
WalletStoresV1,
@ -127,6 +128,7 @@ function updateGroupStatus(rg: RefreshGroupRecord): void {
rg.retryInfo = initRetryInfo();
} else {
rg.timestampFinished = getTimestampNow();
rg.operationStatus = OperationStatus.Finished;
rg.retryInfo = initRetryInfo();
}
}
@ -929,6 +931,7 @@ export async function createRefreshGroup(
}
const refreshGroup: RefreshGroupRecord = {
operationStatus: OperationStatus.Pending,
timestampFinished: undefined,
statusPerCoin: oldCoinPubs.map(() => RefreshCoinStatus.Pending),
lastError: undefined,


@ -41,6 +41,7 @@ import {
} from "@gnu-taler/taler-util";
import { InternalWalletState } from "../common.js";
import {
OperationStatus,
ReserveBankInfo,
ReserveRecord,
ReserveRecordStatus,
@ -155,6 +156,7 @@ export async function createReserve(
lastError: undefined,
currency: req.amount.currency,
requestedQuery: false,
operationStatus: OperationStatus.Pending,
};
const exchangeInfo = await updateExchangeFromUrl(ws, req.exchange);
@ -250,6 +252,7 @@ export async function forceQueryReserve(
switch (reserve.reserveStatus) {
case ReserveRecordStatus.DORMANT:
reserve.reserveStatus = ReserveRecordStatus.QUERYING_STATUS;
reserve.operationStatus = OperationStatus.Pending;
break;
default:
reserve.requestedQuery = true;
@ -338,6 +341,7 @@ async function registerReserveWithBank(
}
r.timestampReserveInfoPosted = getTimestampNow();
r.reserveStatus = ReserveRecordStatus.WAIT_CONFIRM_BANK;
r.operationStatus = OperationStatus.Pending;
if (!r.bankInfo) {
throw Error("invariant failed");
}
@ -419,6 +423,7 @@ async function processReserveBankStatusImpl(
const now = getTimestampNow();
r.timestampBankConfirmed = now;
r.reserveStatus = ReserveRecordStatus.BANK_ABORTED;
r.operationStatus = OperationStatus.Finished;
r.retryInfo = initRetryInfo();
await tx.reserves.put(r);
});
@ -455,6 +460,7 @@ async function processReserveBankStatusImpl(
const now = getTimestampNow();
r.timestampBankConfirmed = now;
r.reserveStatus = ReserveRecordStatus.QUERYING_STATUS;
r.operationStatus = OperationStatus.Pending;
r.retryInfo = initRetryInfo();
} else {
switch (r.reserveStatus) {
@ -658,6 +664,7 @@ async function updateReserve(
if (denomSelInfo.selectedDenoms.length === 0) {
newReserve.reserveStatus = ReserveRecordStatus.DORMANT;
newReserve.operationStatus = OperationStatus.Finished;
newReserve.lastError = undefined;
newReserve.retryInfo = initRetryInfo();
await tx.reserves.put(newReserve);
@ -684,11 +691,13 @@ async function updateReserve(
denomsSel: denomSelectionInfoToState(denomSelInfo),
secretSeed: encodeCrock(getRandomBytes(64)),
denomSelUid: encodeCrock(getRandomBytes(32)),
operationStatus: OperationStatus.Pending,
};
newReserve.lastError = undefined;
newReserve.retryInfo = initRetryInfo();
newReserve.reserveStatus = ReserveRecordStatus.DORMANT;
newReserve.operationStatus = OperationStatus.Finished;
await tx.reserves.put(newReserve);
await tx.withdrawalGroups.put(withdrawalRecord);


@ -53,6 +53,7 @@ import {
DenomSelectionState,
ExchangeDetailsRecord,
ExchangeRecord,
OperationStatus,
PlanchetRecord,
} from "../db.js";
import { walletCoreDebugFlags } from "../util/debugFlags.js";
@ -968,7 +969,8 @@ async function processWithdrawGroupImpl(
if (wg.timestampFinish === undefined && numFinished === numTotalCoins) {
finishedForFirstTime = true;
wg.timestampFinish = getTimestampNow();
wg.lastError = undefined;
wg.operationStatus = OperationStatus.Finished;
delete wg.lastError;
wg.retryInfo = initRetryInfo();
}