idb-bridge: faster indices, various correctness fixes and tests

parent 2237058bcc
commit e84a1789af
@@ -19,7 +19,6 @@
     "@rollup/plugin-commonjs": "^17.1.0",
     "@rollup/plugin-json": "^4.1.0",
     "@rollup/plugin-node-resolve": "^11.2.0",
-    "@types/lodash": "^4.14.178",
     "@types/node": "^14.14.22",
     "ava": "^3.15.0",
     "esm": "^3.2.25",
@@ -29,7 +28,6 @@
     "typescript": "^4.1.3"
   },
   "dependencies": {
-    "lodash": "^4.17.21",
     "tslib": "^2.1.0"
   },
   "ava": {
@@ -23,6 +23,12 @@ import {
   BridgeIDBRequest,
   BridgeIDBTransaction,
 } from "./bridge-idb";
+import {
+  IDBCursorDirection,
+  IDBCursorWithValue,
+  IDBKeyRange,
+  IDBValidKey,
+} from "./idbtypes.js";
 import { MemoryBackend } from "./MemoryBackend";

 function promiseFromRequest(request: BridgeIDBRequest): Promise<any> {
@@ -104,6 +110,7 @@ test("Spec: Example 1 Part 2", async (t) => {

 test("Spec: Example 1 Part 3", async (t) => {
   const backend = new MemoryBackend();
+  backend.enableTracing = true;
   const idb = new BridgeIDBFactory(backend);

   const request = idb.open("library");
@@ -348,3 +355,184 @@ test("export", async (t) => {
   t.is(exportedData2.databases["library"].schema.databaseVersion, 42);
   t.pass();
 });
+
+test("range queries", async (t) => {
+  const backend = new MemoryBackend();
+  backend.enableTracing = true;
+  const idb = new BridgeIDBFactory(backend);
+
+  const request = idb.open("mydb");
+  request.onupgradeneeded = () => {
+    const db = request.result;
+    const store = db.createObjectStore("bla", { keyPath: "x" });
+    store.createIndex("by_y", "y");
+    store.createIndex("by_z", "z");
+  };
+
+  const db: BridgeIDBDatabase = await promiseFromRequest(request);
+
+  t.is(db.name, "mydb");
+
+  const tx = db.transaction("bla", "readwrite");
+  const store = tx.objectStore("bla");
+
+  store.put({ x: 0, y: "a" });
+  store.put({ x: 2, y: "a" });
+  store.put({ x: 4, y: "b" });
+  store.put({ x: 8, y: "b" });
+  store.put({ x: 10, y: "c" });
+  store.put({ x: 12, y: "c" });
+
+  await promiseFromTransaction(tx);
+
+  async function doCursorStoreQuery(
+    range: IDBKeyRange | IDBValidKey | undefined,
+    direction: IDBCursorDirection | undefined,
+    expected: any[],
+  ): Promise<void> {
+    const tx = db.transaction("bla", "readwrite");
+    const store = tx.objectStore("bla");
+    const vals: any[] = [];
+
+    const req = store.openCursor(range, direction);
+    while (1) {
+      await promiseFromRequest(req);
+      const cursor: IDBCursorWithValue = req.result;
+      if (!cursor) {
+        break;
+      }
+      cursor.continue();
+      vals.push(cursor.value);
+    }
+
+    await promiseFromTransaction(tx);
+
+    t.deepEqual(vals, expected);
+  }
+
+  async function doCursorIndexQuery(
+    range: IDBKeyRange | IDBValidKey | undefined,
+    direction: IDBCursorDirection | undefined,
+    expected: any[],
+  ): Promise<void> {
+    const tx = db.transaction("bla", "readwrite");
+    const store = tx.objectStore("bla");
+    const index = store.index("by_y");
+    const vals: any[] = [];
+
+    const req = index.openCursor(range, direction);
+    while (1) {
+      await promiseFromRequest(req);
+      const cursor: IDBCursorWithValue = req.result;
+      if (!cursor) {
+        break;
+      }
+      cursor.continue();
+      vals.push(cursor.value);
+    }
+
+    await promiseFromTransaction(tx);
+
+    t.deepEqual(vals, expected);
+  }
+
+  await doCursorStoreQuery(undefined, undefined, [
+    { x: 0, y: "a" },
+    { x: 2, y: "a" },
+    { x: 4, y: "b" },
+    { x: 8, y: "b" },
+    { x: 10, y: "c" },
+    { x: 12, y: "c" },
+  ]);
+
+  await doCursorStoreQuery(
+    BridgeIDBKeyRange.bound(0, 12, true, true),
+    undefined,
+    [
+      { x: 2, y: "a" },
+      { x: 4, y: "b" },
+      { x: 8, y: "b" },
+      { x: 10, y: "c" },
+    ],
+  );
+
+  await doCursorIndexQuery(
+    BridgeIDBKeyRange.bound("a", "c", true, true),
+    undefined,
+    [
+      { x: 4, y: "b" },
+      { x: 8, y: "b" },
+    ],
+  );
+
+  await doCursorIndexQuery(undefined, "nextunique", [
+    { x: 0, y: "a" },
+    { x: 4, y: "b" },
+    { x: 10, y: "c" },
+  ]);
+
+  await doCursorIndexQuery(undefined, "prevunique", [
+    { x: 10, y: "c" },
+    { x: 4, y: "b" },
+    { x: 0, y: "a" },
+  ]);
+
+  db.close();
+
+  t.pass();
+});
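The expected results of the "nextunique" and "prevunique" queries above follow from the IndexedDB rule that the unique cursor directions visit each distinct index key once and always yield the record with the lowest primary key for that key. An illustrative sketch (not part of the commit) that derives the same expectation from the test data:

// Records inserted by the test above, keyed by x, indexed by y.
const recs = [
  { x: 0, y: "a" }, { x: 2, y: "a" },
  { x: 4, y: "b" }, { x: 8, y: "b" },
  { x: 10, y: "c" }, { x: 12, y: "c" },
];

// One record per distinct index key, and for each key the record with the
// lowest primary key; "prevunique" yields the same records in reverse key order.
function expectedUnique(records: { x: number; y: string }[]) {
  const byY = new Map<string, { x: number; y: string }>();
  for (const r of [...records].sort((a, b) => a.x - b.x)) {
    if (!byY.has(r.y)) byY.set(r.y, r);
  }
  return [...byY.keys()].sort().map((y) => byY.get(y)!);
}

expectedUnique(recs); // [{ x: 0, y: "a" }, { x: 4, y: "b" }, { x: 10, y: "c" }]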
@@ -33,7 +33,7 @@ import {
   structuredRevive,
 } from "./util/structuredClone";
 import { ConstraintError, DataError } from "./util/errors";
-import BTree, { ISortedMapF } from "./tree/b+tree";
+import BTree, { ISortedMapF, ISortedSetF } from "./tree/b+tree";
 import { compareKeys } from "./util/cmp";
 import { StoreKeyResult, makeStoreKeyValue } from "./util/makeStoreKeyValue";
 import { getIndexKeys } from "./util/getIndexKeys";
@@ -95,18 +95,11 @@ interface Database {
   connectionCookies: string[];
 }

-/** @public */
-export interface IndexDump {
-  name: string;
-  records: IndexRecord[];
-}
-
 /** @public */
 export interface ObjectStoreDump {
   name: string;
   keyGenerator: number;
   records: ObjectStoreRecord[];
-  indexes: { [name: string]: IndexDump };
 }

 /** @public */
@@ -140,7 +133,7 @@ interface Connection {
 /** @public */
 export interface IndexRecord {
   indexKey: Key;
-  primaryKeys: Key[];
+  primaryKeys: ISortedSetF<Key>;
 }

 /** @public */
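Context for the change above: storing primaryKeys as a functional sorted set instead of a plain Key[] means duplicate index entries no longer require copying and re-sorting an array on every insert or delete, and index cursors can step directly to the next primary key. A minimal sketch that uses only operations appearing elsewhere in this commit (a BTree with undefined values, with, without, size, minKey, nextHigherKey); the concrete keys are invented for illustration:

import BTree, { ISortedSetF } from "./tree/b+tree";
import { compareKeys } from "./util/cmp";

// The set of primary keys of all records sharing one index key,
// shaped the way insertIntoIndex() builds it.
let primaryKeys: ISortedSetF<number> = new BTree([[2, undefined]], compareKeys);

// Insertion and deletion return new sets; earlier versions stay valid, so the
// committed (originalData) and in-transaction (modifiedData) views can coexist.
primaryKeys = primaryKeys.with(8).with(4);
primaryKeys = primaryKeys.without(8);
console.log(primaryKeys.size); // 2

// Cursor-style walk over the duplicates of one index key:
let pos = primaryKeys.minKey();
while (pos !== undefined) {
  console.log(pos); // visits 2, then 4
  pos = primaryKeys.nextHigherKey(pos);
}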
@@ -185,6 +178,27 @@ function nextStoreKey<T>(
   return res[1].primaryKey;
 }

+function assertInvariant(cond: boolean): asserts cond {
+  if (!cond) {
+    throw Error("invariant failed");
+  }
+}
+
+function nextKey(
+  forward: boolean,
+  tree: ISortedSetF<IDBValidKey>,
+  key: IDBValidKey | undefined,
+): IDBValidKey | undefined {
+  if (key != null) {
+    return forward ? tree.nextHigherKey(key) : tree.nextLowerKey(key);
+  }
+  return forward ? tree.minKey() : tree.maxKey();
+}
+
+/**
+ * Return the key that is furthest in
+ * the direction indicated by the 'forward' flag.
+ */
 function furthestKey(
   forward: boolean,
   key1: Key | undefined,
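A quick illustration (not from the diff) of what the helpers added above compute: nextKey starts a walk at the smallest or largest element when key is undefined and otherwise steps strictly past key; furthestKey, per its comment, returns whichever of the two candidate keys lies further along the iteration direction (treating a missing argument as "no constraint" is an assumption based on how it is called below). The new getIndexRecords and getObjectStoreRecords functions rely on both to position their cursors:

const dups: ISortedSetF<number> = new BTree(
  [[1, undefined], [3, undefined], [5, undefined]],
  compareKeys,
);

nextKey(true, dups, undefined);  // 1, the start of a forward walk
nextKey(true, dups, 3);          // 5
nextKey(true, dups, 5);          // undefined, the walk is finished
nextKey(false, dups, undefined); // 5, the start of a backward walk

furthestKey(true, 4, 10);        // 10 (forward: the key further ahead)
furthestKey(false, 4, 10);       // 4 (backward: the key further back)
furthestKey(true, undefined, 7); // 7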
@@ -258,22 +272,20 @@ export class MemoryBackend implements Backend {
    * Must be called before any connections to the database backend have
    * been made.
    */
-  importDump(data: any) {
-    if (this.enableTracing) {
-      console.log("importing dump (a)");
-    }
+  importDump(dataJson: any) {
     if (this.transactionIdCounter != 1 || this.connectionIdCounter != 1) {
       throw Error(
         "data must be imported before first transaction or connection",
       );
     }

+    // FIXME: validate!
+    const data = structuredRevive(dataJson) as MemoryBackendDump;
+
     if (typeof data !== "object") {
       throw Error("db dump corrupt");
     }

-    data = structuredRevive(data);
-
     this.databases = {};

     for (const dbName of Object.keys(data.databases)) {
@@ -285,29 +297,10 @@ export class MemoryBackend implements Backend {
       for (const objectStoreName of Object.keys(
         data.databases[dbName].objectStores,
       )) {
-        const dumpedObjectStore =
+        const storeSchema = schema.objectStores[objectStoreName];
+        const dumpedObjectStore: ObjectStoreDump =
           data.databases[dbName].objectStores[objectStoreName];

-        const indexes: { [name: string]: Index } = {};
-        for (const indexName of Object.keys(dumpedObjectStore.indexes)) {
-          const dumpedIndex = dumpedObjectStore.indexes[indexName];
-          const pairs = dumpedIndex.records.map((r: any) => {
-            return structuredClone([r.indexKey, r]);
-          });
-          const indexData: ISortedMapF<Key, IndexRecord> = new BTree(
-            pairs,
-            compareKeys,
-          );
-          const index: Index = {
-            deleted: false,
-            modifiedData: undefined,
-            modifiedName: undefined,
-            originalName: indexName,
-            originalData: indexData,
-          };
-          indexes[indexName] = index;
-        }
-
         const pairs = dumpedObjectStore.records.map((r: any) => {
           return structuredClone([r.primaryKey, r]);
         });
@@ -323,10 +316,34 @@ export class MemoryBackend implements Backend {
           originalData: objectStoreData,
           originalName: objectStoreName,
           originalKeyGenerator: dumpedObjectStore.keyGenerator,
-          committedIndexes: indexes,
+          committedIndexes: {},
           modifiedIndexes: {},
         };
         objectStores[objectStoreName] = objectStore;
+
+        for (const indexName in storeSchema.indexes) {
+          const indexSchema = storeSchema.indexes[indexName];
+          const newIndex: Index = {
+            deleted: false,
+            modifiedData: undefined,
+            modifiedName: undefined,
+            originalData: new BTree([], compareKeys),
+            originalName: indexName,
+          };
+          const storeData = objectStoreData;
+
+          storeData.forEach((v, k) => {
+            try {
+              this.insertIntoIndex(newIndex, k, v.value, indexSchema);
+            } catch (e) {
+              if (e instanceof DataError) {
+                // We don't propagate this error here.
+                return;
+              }
+              throw e;
+            }
+          });
+        }
       }
       const db: Database = {
         deleted: false,
@@ -368,16 +385,6 @@ export class MemoryBackend implements Backend {
     const objectStores: { [name: string]: ObjectStoreDump } = {};
     for (const objectStoreName of Object.keys(db.committedObjectStores)) {
       const objectStore = db.committedObjectStores[objectStoreName];
-
-      const indexes: { [name: string]: IndexDump } = {};
-      for (const indexName of Object.keys(objectStore.committedIndexes)) {
-        const index = objectStore.committedIndexes[indexName];
-        const indexRecords: IndexRecord[] = [];
-        index.originalData.forEach((v: IndexRecord) => {
-          indexRecords.push(structuredClone(v));
-        });
-        indexes[indexName] = { name: indexName, records: indexRecords };
-      }
       const objectStoreRecords: ObjectStoreRecord[] = [];
       objectStore.originalData.forEach((v: ObjectStoreRecord) => {
         objectStoreRecords.push(structuredClone(v));
@@ -386,7 +393,6 @@ export class MemoryBackend implements Backend {
         name: objectStoreName,
         records: objectStoreRecords,
         keyGenerator: objectStore.originalKeyGenerator,
-        indexes: indexes,
       };
     }
     const dbDump: DatabaseDump = {
@@ -1047,17 +1053,17 @@ export class MemoryBackend implements Backend {
         indexProperties.multiEntry,
       );
       for (const indexKey of indexKeys) {
-        const existingRecord = indexData.get(indexKey);
-        if (!existingRecord) {
+        const existingIndexRecord = indexData.get(indexKey);
+        if (!existingIndexRecord) {
           throw Error("db inconsistent: expected index entry missing");
         }
-        const newPrimaryKeys = existingRecord.primaryKeys.filter(
-          (x) => compareKeys(x, primaryKey) !== 0,
+        const newPrimaryKeys = existingIndexRecord.primaryKeys.without(
+          primaryKey,
         );
-        if (newPrimaryKeys.length === 0) {
+        if (newPrimaryKeys.size === 0) {
           index.modifiedData = indexData.without(indexKey);
         } else {
-          const newIndexRecord = {
+          const newIndexRecord: IndexRecord = {
             indexKey,
             primaryKeys: newPrimaryKeys,
           };
@@ -1117,11 +1123,6 @@ export class MemoryBackend implements Backend {
       );
     }

-    let numResults = 0;
-    let indexKeys: Key[] = [];
-    let primaryKeys: Key[] = [];
-    let values: Value[] = [];
-
     const forward: boolean =
       req.direction === "next" || req.direction === "nextunique";
     const unique: boolean =
@@ -1133,280 +1134,44 @@ export class MemoryBackend implements Backend {

     const haveIndex = req.indexName !== undefined;

+    let resp: RecordGetResponse;
+
     if (haveIndex) {
       const index =
         myConn.objectStoreMap[req.objectStoreName].indexMap[req.indexName!];
       const indexData = index.modifiedData || index.originalData;
-      let indexPos = req.lastIndexPosition;
-
-      if (indexPos === undefined) {
-        // First time we iterate! So start at the beginning (lower/upper)
-        // of our allowed range.
-        indexPos = forward ? range.lower : range.upper;
-      }
-
-      let primaryPos = req.lastObjectStorePosition;
-
-      // We might have to advance the index key further!
-      if (req.advanceIndexKey !== undefined) {
-        const compareResult = compareKeys(req.advanceIndexKey, indexPos);
-        if ((forward && compareResult > 0) || (!forward && compareResult > 0)) {
-          indexPos = req.advanceIndexKey;
-        } else if (compareResult == 0 && req.advancePrimaryKey !== undefined) {
-          // index keys are the same, so advance the primary key
-          if (primaryPos === undefined) {
-            primaryPos = req.advancePrimaryKey;
-          } else {
-            const primCompareResult = compareKeys(req.advancePrimaryKey, primaryPos);
-            if ((forward && primCompareResult > 0) || (!forward && primCompareResult < 0)) {
-              primaryPos = req.advancePrimaryKey;
-            }
-          }
-        }
-      }
-
-      if (indexPos === undefined || indexPos === null) {
-        indexPos = forward ? indexData.minKey() : indexData.maxKey();
-      }
-
-      if (indexPos === undefined) {
-        throw Error("invariant violated");
-      }
-
-      let indexEntry: IndexRecord | undefined;
-      indexEntry = indexData.get(indexPos);
-      if (!indexEntry) {
-        const res = forward
-          ? indexData.nextHigherPair(indexPos)
-          : indexData.nextLowerPair(indexPos);
-        if (res) {
-          indexEntry = res[1];
-          indexPos = indexEntry.indexKey;
-        }
-      }
-
-      if (unique) {
-        while (1) {
-          if (req.limit != 0 && numResults == req.limit) {
-            break;
-          }
-          if (indexPos === undefined) {
-            break;
-          }
-          if (!range.includes(indexPos)) {
-            break;
-          }
-          if (indexEntry === undefined) {
-            break;
-          }
-
-          if (
-            req.lastIndexPosition === null ||
-            req.lastIndexPosition === undefined ||
-            compareKeys(indexEntry.indexKey, req.lastIndexPosition) !== 0
-          ) {
-            indexKeys.push(indexEntry.indexKey);
-            primaryKeys.push(indexEntry.primaryKeys[0]);
-            numResults++;
-          }
-
-          const res: any = forward
-            ? indexData.nextHigherPair(indexPos)
-            : indexData.nextLowerPair(indexPos);
-          if (res) {
-            indexPos = res[1].indexKey;
-            indexEntry = res[1] as IndexRecord;
-          } else {
-            break;
-          }
-        }
-      } else {
-        let primkeySubPos = 0;
-
-        // Sort out the case where the index key is the same, so we have
-        // to get the prev/next primary key
-        if (
-          indexEntry !== undefined &&
-          req.lastIndexPosition !== undefined &&
-          compareKeys(indexEntry.indexKey, req.lastIndexPosition) === 0
-        ) {
-          let pos = forward ? 0 : indexEntry.primaryKeys.length - 1;
-          this.enableTracing &&
-            console.log("number of primary keys", indexEntry.primaryKeys.length);
-          this.enableTracing && console.log("start pos is", pos);
-          // Advance past the lastObjectStorePosition
-          do {
-            const cmpResult = compareKeys(
-              req.lastObjectStorePosition,
-              indexEntry.primaryKeys[pos],
-            );
-            this.enableTracing && console.log("cmp result is", cmpResult);
-            if ((forward && cmpResult < 0) || (!forward && cmpResult > 0)) {
-              break;
-            }
-            pos += forward ? 1 : -1;
-            this.enableTracing && console.log("now pos is", pos);
-          } while (pos >= 0 && pos < indexEntry.primaryKeys.length);
-
-          // Make sure we're at least at advancedPrimaryPos
-          while (
-            primaryPos !== undefined &&
-            pos >= 0 &&
-            pos < indexEntry.primaryKeys.length
-          ) {
-            const cmpResult = compareKeys(primaryPos, indexEntry.primaryKeys[pos]);
-            if ((forward && cmpResult <= 0) || (!forward && cmpResult >= 0)) {
-              break;
-            }
-            pos += forward ? 1 : -1;
-          }
-          primkeySubPos = pos;
-        } else if (indexEntry !== undefined) {
-          primkeySubPos = forward ? 0 : indexEntry.primaryKeys.length - 1;
-        }
-
-        if (this.enableTracing) {
-          console.log("subPos=", primkeySubPos);
-          console.log("indexPos=", indexPos);
-        }
-
-        while (1) {
-          if (req.limit != 0 && numResults == req.limit) {
-            break;
-          }
-          if (indexPos === undefined) {
-            break;
-          }
-          if (!range.includes(indexPos)) {
-            break;
-          }
-          if (indexEntry === undefined) {
-            break;
-          }
-          if (
-            primkeySubPos < 0 ||
-            primkeySubPos >= indexEntry.primaryKeys.length
-          ) {
-            const res: any = forward
-              ? indexData.nextHigherPair(indexPos)
-              : indexData.nextLowerPair(indexPos);
-            if (res) {
-              indexPos = res[1].indexKey;
-              indexEntry = res[1];
-              primkeySubPos = forward ? 0 : indexEntry!.primaryKeys.length - 1;
-              continue;
-            } else {
-              break;
-            }
-          }
-          indexKeys.push(indexEntry.indexKey);
-          primaryKeys.push(indexEntry.primaryKeys[primkeySubPos]);
-          numResults++;
-          primkeySubPos += forward ? 1 : -1;
-        }
-      }
-
-      // Now we can collect the values based on the primary keys,
-      // if requested.
-      if (req.resultLevel === ResultLevel.Full) {
-        for (let i = 0; i < numResults; i++) {
-          const result = storeData.get(primaryKeys[i]);
-          if (!result) {
-            console.error("invariant violated during read");
-            console.error("request was", req);
-            throw Error("invariant violated during read");
-          }
-          values.push(result.value);
-        }
-      }
+      resp = getIndexRecords({
+        forward,
+        indexData,
+        storeData,
+        limit: req.limit,
+        unique,
+        range,
+        resultLevel: req.resultLevel,
+        advanceIndexKey: req.advanceIndexKey,
+        advancePrimaryKey: req.advancePrimaryKey,
+        lastIndexPosition: req.lastIndexPosition,
+        lastObjectStorePosition: req.lastObjectStorePosition,
+      });
     } else {
-      // only based on object store, no index involved, phew!
-      let storePos = req.lastObjectStorePosition;
-      if (storePos === undefined) {
-        storePos = forward ? range.lower : range.upper;
-      }
-
       if (req.advanceIndexKey !== undefined) {
         throw Error("unsupported request");
       }
-
-      storePos = furthestKey(forward, req.advancePrimaryKey, storePos);
-
-      if (storePos !== null && storePos !== undefined) {
-        // Advance store position if we are either still at the last returned
-        // store key, or if we are currently not on a key.
-        const storeEntry = storeData.get(storePos);
-        if (this.enableTracing) {
-          console.log("store entry:", storeEntry);
-        }
-        if (
-          !storeEntry ||
-          (req.lastObjectStorePosition !== undefined &&
-            compareKeys(req.lastObjectStorePosition, storePos) === 0)
-        ) {
-          storePos = storeData.nextHigherKey(storePos);
-        }
-      } else {
-        storePos = forward ? storeData.minKey() : storeData.maxKey();
-        if (this.enableTracing) {
-          console.log("setting starting store pos to", storePos);
-        }
-      }
-
-      while (1) {
-        if (req.limit != 0 && numResults == req.limit) {
-          break;
-        }
-        if (storePos === null || storePos === undefined) {
-          break;
-        }
-        if (!range.includes(storePos)) {
-          break;
-        }
-
-        const res = storeData.get(storePos);
-
-        if (res === undefined) {
-          break;
-        }
-
-        if (req.resultLevel >= ResultLevel.OnlyKeys) {
-          primaryKeys.push(structuredClone(storePos));
-        }
-
-        if (req.resultLevel >= ResultLevel.Full) {
-          values.push(structuredClone(res.value));
-        }
-
-        numResults++;
-        storePos = nextStoreKey(forward, storeData, storePos);
-      }
+      resp = getObjectStoreRecords({
+        forward,
+        storeData,
+        limit: req.limit,
+        range,
+        resultLevel: req.resultLevel,
+        advancePrimaryKey: req.advancePrimaryKey,
+        lastIndexPosition: req.lastIndexPosition,
+        lastObjectStorePosition: req.lastObjectStorePosition,
+      });
     }
     if (this.enableTracing) {
-      console.log(`TRACING: getRecords got ${numResults} results`);
+      console.log(`TRACING: getRecords got ${resp.count} results`);
     }
-    return {
-      count: numResults,
-      indexKeys:
-        req.resultLevel >= ResultLevel.OnlyKeys && haveIndex
-          ? indexKeys
-          : undefined,
-      primaryKeys:
-        req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
-      values: req.resultLevel >= ResultLevel.Full ? values : undefined,
-    };
+    return resp;
   }

   async storeRecord(
@@ -1586,21 +1351,20 @@ export class MemoryBackend implements Backend {
       if (indexProperties.unique) {
         throw new ConstraintError();
       } else {
-        const pred = (x: Key) => compareKeys(x, primaryKey) === 0;
-        if (existingRecord.primaryKeys.findIndex(pred) === -1) {
-          const newIndexRecord = {
-            indexKey: indexKey,
-            primaryKeys: [...existingRecord.primaryKeys, primaryKey].sort(
-              compareKeys,
-            ),
-          };
-          index.modifiedData = indexData.with(indexKey, newIndexRecord, true);
-        }
+        const newIndexRecord: IndexRecord = {
+          indexKey: indexKey,
+          primaryKeys: existingRecord.primaryKeys.with(primaryKey),
+        };
+        index.modifiedData = indexData.with(indexKey, newIndexRecord, true);
       }
     } else {
+      const primaryKeys: ISortedSetF<IDBValidKey> = new BTree(
+        [[primaryKey, undefined]],
+        compareKeys,
+      );
       const newIndexRecord: IndexRecord = {
         indexKey: indexKey,
-        primaryKeys: [primaryKey],
+        primaryKeys,
       };
       index.modifiedData = indexData.with(indexKey, newIndexRecord, true);
     }
@@ -1699,3 +1463,286 @@ export class MemoryBackend implements Backend {
     }
   }
 }
+
+function getIndexRecords(req: {
+  indexData: ISortedMapF<IDBValidKey, IndexRecord>;
+  storeData: ISortedMapF<IDBValidKey, ObjectStoreRecord>;
+  lastIndexPosition?: IDBValidKey;
+  forward: boolean;
+  unique: boolean;
+  range: IDBKeyRange;
+  lastObjectStorePosition?: IDBValidKey;
+  advancePrimaryKey?: IDBValidKey;
+  advanceIndexKey?: IDBValidKey;
+  limit: number;
+  resultLevel: ResultLevel;
+}): RecordGetResponse {
+  let numResults = 0;
+  const indexKeys: Key[] = [];
+  const primaryKeys: Key[] = [];
+  const values: Value[] = [];
+  const { unique, range, forward, indexData } = req;
+  let indexPos = req.lastIndexPosition;
+  let objectStorePos: IDBValidKey | undefined = undefined;
+  let indexEntry: IndexRecord | undefined = undefined;
+  const rangeStart = forward ? range.lower : range.upper;
+  const dataStart = forward ? indexData.minKey() : indexData.maxKey();
+  indexPos = furthestKey(forward, indexPos, rangeStart);
+  indexPos = furthestKey(forward, indexPos, dataStart);
+
+  function nextIndexEntry(): IndexRecord | undefined {
+    assertInvariant(indexPos != null);
+    const res: [IDBValidKey, IndexRecord] | undefined = forward
+      ? indexData.nextHigherPair(indexPos)
+      : indexData.nextLowerPair(indexPos);
+    if (res) {
+      indexEntry = res[1];
+      indexPos = indexEntry.indexKey;
+      return indexEntry;
+    } else {
+      indexEntry = undefined;
+      indexPos = undefined;
+      return undefined;
+    }
+  }
+
+  function packResult(): RecordGetResponse {
+    return {
+      count: numResults,
+      indexKeys:
+        req.resultLevel >= ResultLevel.OnlyKeys ? indexKeys : undefined,
+      primaryKeys:
+        req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
+      values: req.resultLevel >= ResultLevel.Full ? values : undefined,
+    };
+  }
+
+  if (indexPos == null) {
+    return packResult();
+  }
+
+  // Now we align at indexPos and after objectStorePos
+
+  indexEntry = indexData.get(indexPos);
+  if (!indexEntry) {
+    // We're not aligned to an index key, go to next index entry
+    nextIndexEntry();
+  }
+  if (indexEntry) {
+    objectStorePos = nextKey(
+      true,
+      indexEntry.primaryKeys,
+      req.lastObjectStorePosition,
+    );
+  }
+
+  if (
+    forward &&
+    range.lowerOpen &&
+    range.lower != null &&
+    compareKeys(range.lower, indexPos) === 0
+  ) {
+    const e = nextIndexEntry();
+    objectStorePos = e?.primaryKeys.minKey();
+  }
+
+  if (
+    !forward &&
+    range.upperOpen &&
+    range.upper != null &&
+    compareKeys(range.upper, indexPos) === 0
+  ) {
+    const e = nextIndexEntry();
+    objectStorePos = e?.primaryKeys.minKey();
+  }
+
+  if (
+    unique &&
+    indexPos != null &&
+    req.lastIndexPosition != null &&
+    compareKeys(indexPos, req.lastIndexPosition) === 0
+  ) {
+    const e = nextIndexEntry();
+    objectStorePos = e?.primaryKeys.minKey();
+  }
+
+  if (req.advancePrimaryKey) {
+    indexPos = furthestKey(forward, indexPos, req.advanceIndexKey);
+    if (indexPos) {
+      indexEntry = indexData.get(indexPos);
+      if (!indexEntry) {
+        nextIndexEntry();
+      }
+    }
+  }
+
+  // Use advancePrimaryKey if necessary
+  if (
+    req.advanceIndexKey != null &&
+    req.advancePrimaryKey &&
+    indexPos != null &&
+    indexEntry &&
+    compareKeys(indexPos, req.advanceIndexKey) == 0
+  ) {
+    if (
+      objectStorePos == null ||
+      compareKeys(req.advancePrimaryKey, objectStorePos) > 0
+    ) {
+      objectStorePos = nextKey(
+        true,
+        indexEntry.primaryKeys,
+        req.advancePrimaryKey,
+      );
+    }
+  }
+
+  while (1) {
+    if (indexPos === undefined) {
+      break;
+    }
+    if (req.limit != 0 && numResults == req.limit) {
+      break;
+    }
+    if (!range.includes(indexPos)) {
+      break;
+    }
+    if (indexEntry === undefined) {
+      break;
+    }
+    if (objectStorePos == null) {
+      // We don't have any more records with the current index key.
+      nextIndexEntry();
+      if (indexEntry) {
+        objectStorePos = indexEntry.primaryKeys.minKey();
+      }
+      continue;
+    }
+    indexKeys.push(indexEntry.indexKey);
+    primaryKeys.push(objectStorePos);
+    numResults++;
+    if (unique) {
+      objectStorePos = undefined;
+    } else {
+      objectStorePos = indexEntry.primaryKeys.nextHigherKey(objectStorePos);
+    }
+  }
+
+  // Now we can collect the values based on the primary keys,
+  // if requested.
+  if (req.resultLevel === ResultLevel.Full) {
+    for (let i = 0; i < numResults; i++) {
+      const result = req.storeData.get(primaryKeys[i]);
+      if (!result) {
+        console.error("invariant violated during read");
+        console.error("request was", req);
+        throw Error("invariant violated during read");
+      }
+      values.push(result.value);
+    }
+  }
+
+  return packResult();
+}
+
+function getObjectStoreRecords(req: {
+  storeData: ISortedMapF<IDBValidKey, ObjectStoreRecord>;
+  lastIndexPosition?: IDBValidKey;
+  forward: boolean;
+  range: IDBKeyRange;
+  lastObjectStorePosition?: IDBValidKey;
+  advancePrimaryKey?: IDBValidKey;
+  limit: number;
+  resultLevel: ResultLevel;
+}): RecordGetResponse {
+  let numResults = 0;
+  const indexKeys: Key[] = [];
+  const primaryKeys: Key[] = [];
+  const values: Value[] = [];
+  const { storeData, range, forward } = req;
+
+  function packResult(): RecordGetResponse {
+    return {
+      count: numResults,
+      indexKeys:
+        req.resultLevel >= ResultLevel.OnlyKeys ? indexKeys : undefined,
+      primaryKeys:
+        req.resultLevel >= ResultLevel.OnlyKeys ? primaryKeys : undefined,
+      values: req.resultLevel >= ResultLevel.Full ? values : undefined,
+    };
+  }
+
+  const rangeStart = forward ? range.lower : range.upper;
+  const dataStart = forward ? storeData.minKey() : storeData.maxKey();
+  let storePos = req.lastObjectStorePosition;
+  storePos = furthestKey(forward, storePos, rangeStart);
+  storePos = furthestKey(forward, storePos, dataStart);
+  storePos = furthestKey(forward, storePos, req.advancePrimaryKey);
+
+  if (storePos != null) {
+    // Advance store position if we are either still at the last returned
+    // store key, or if we are currently not on a key.
+    const storeEntry = storeData.get(storePos);
+    if (
+      !storeEntry ||
+      (req.lastObjectStorePosition != null &&
+        compareKeys(req.lastObjectStorePosition, storePos) === 0)
+    ) {
+      storePos = forward
+        ? storeData.nextHigherKey(storePos)
+        : storeData.nextLowerKey(storePos);
+    }
+  } else {
+    storePos = forward ? storeData.minKey() : storeData.maxKey();
+  }
+
+  if (
+    storePos != null &&
+    forward &&
+    range.lowerOpen &&
+    range.lower != null &&
+    compareKeys(range.lower, storePos) === 0
+  ) {
+    storePos = storeData.nextHigherKey(storePos);
+  }
+
+  if (
+    storePos != null &&
+    !forward &&
+    range.upperOpen &&
+    range.upper != null &&
+    compareKeys(range.upper, storePos) === 0
+  ) {
+    storePos = storeData.nextLowerKey(storePos);
+  }
+
+  while (1) {
+    if (req.limit != 0 && numResults == req.limit) {
+      break;
+    }
+    if (storePos === null || storePos === undefined) {
+      break;
+    }
+    if (!range.includes(storePos)) {
+      break;
+    }
+
+    const res = storeData.get(storePos);
+
+    if (res === undefined) {
+      break;
+    }
+
+    if (req.resultLevel >= ResultLevel.OnlyKeys) {
+      primaryKeys.push(structuredClone(storePos));
+    }
+
+    if (req.resultLevel >= ResultLevel.Full) {
+      values.push(structuredClone(res.value));
+    }
+
+    numResults++;
+    storePos = nextStoreKey(forward, storeData, storePos);
+  }
+
+  return packResult();
+}
@@ -103,7 +103,7 @@ export interface RecordGetRequest {
   advancePrimaryKey?: IDBValidKey;
   /**
    * Maximum number of results to return.
-   * If -1, return all available results
+   * If 0, return all available results
    */
   limit: number;
   resultLevel: ResultLevel;
@@ -1132,8 +1132,6 @@ export class BridgeIDBIndex implements IDBIndex {
       );
     }

-    BridgeIDBFactory.enableTracing && console.log("opening cursor on", this);
-
     this._confirmActiveTransaction();

     range = simplifyRange(range);
@@ -1,7 +1,5 @@
 import test from "ava";
 import { BridgeIDBCursor, BridgeIDBKeyRange } from "..";
-import { BridgeIDBCursorWithValue } from "../bridge-idb";
-import { IDBDatabase } from "../idbtypes";
 import {
   createDatabase,
   createdb,
@@ -16,7 +16,6 @@ import { Listener } from "./util/FakeEventTarget";
 import {
   DatabaseDump,
   ObjectStoreDump,
-  IndexDump,
   IndexRecord,
   ObjectStoreRecord,
   MemoryBackendDump,
@@ -64,7 +63,6 @@ export type {
   RequestObj,
   DatabaseDump,
   ObjectStoreDump,
-  IndexDump,
   IndexRecord,
   ObjectStoreRecord,
   IndexProperties,
(File diff suppressed because it is too large.)
@@ -1,28 +1,4 @@
-/*
-Copyright (c) 2018 David Piepgrass
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
-SPDX-License-Identifier: MIT
-*/
-
-// Original repository: https://github.com/qwertie/btree-typescript
-
+// B+ tree by David Piepgrass. License: MIT
+
 /** Read-only set interface (subinterface of IMapSource<K,any>).
  * The word "set" usually means that each item in the collection is unique
@@ -350,6 +326,8 @@ export interface IMapF<K = any, V = any> extends IMapSource<K, V>, ISetF<K> {
 export interface ISortedSetF<K = any> extends ISetF<K>, ISortedSetSource<K> {
   // TypeScript requires this method of ISortedSetSource to be repeated
   keys(firstKey?: K): IterableIterator<K>;
+  without(key: K): ISortedSetF<K>;
+  with(key: K): ISortedSetF<K>;
 }

 export interface ISortedMapF<K = any, V = any>
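The two methods added above give sets the same persistent behaviour that ISortedMapF.with and .without already provide for maps: each call returns a new set and leaves the receiver untouched, which is what lets MemoryBackend keep its committed and in-transaction index data side by side. A brief sketch with made-up keys (using the BTree class from this package and the idb-bridge compareKeys comparator):

const s0: ISortedSetF<string> = new BTree([["a", undefined]], compareKeys);
const s1 = s0.with("b");    // contains "a", "b"
const s2 = s1.without("a"); // contains "b"

console.log(s0.size, s1.size, s2.size); // 1 2 1, the original set is unchanged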
@@ -46,9 +46,16 @@ test("structured clone", (t) => {
   });
 });

-test("structured clone (cycles)", (t) => {
+test("structured clone (array cycles)", (t) => {
   const obj1: any[] = [1, 2];
   obj1.push(obj1);
   const obj1Clone = structuredClone(obj1);
   t.is(obj1Clone, obj1Clone[2]);
 });
+
+test("structured clone (object cycles)", (t) => {
+  const obj1: any = { a: 1, b: 2 };
+  obj1.c = obj1;
+  const obj1Clone = structuredClone(obj1);
+  t.is(obj1Clone, obj1Clone.c);
+});
@@ -14,7 +14,7 @@
   permissions and limitations under the License.
  */

-import cloneDeep from "lodash/cloneDeep";
+import { DataCloneError } from "./errors.js";

 const { toString: toStr } = {};
 const hasOwn = {}.hasOwnProperty;
@@ -77,6 +77,100 @@ function isRegExp(val: any): boolean {
   return toStringTag(val) === "RegExp";
 }

+function copyBuffer(cur: any) {
+  if (cur instanceof Buffer) {
+    return Buffer.from(cur);
+  }
+
+  return new cur.constructor(cur.buffer.slice(), cur.byteOffset, cur.length);
+}
+
+function checkCloneableOrThrow(x: any) {
+  if (x == null) return;
+  if (typeof x !== "object" && typeof x !== "function") return;
+  if (x instanceof Date) return;
+  if (Array.isArray(x)) return;
+  if (x instanceof Map) return;
+  if (x instanceof Set) return;
+  if (isUserObject(x)) return;
+  if (isPlainObject(x)) return;
+  throw new DataCloneError();
+}
+
+export function mkDeepClone() {
+  const refs = [] as any;
+  const refsNew = [] as any;
+
+  return clone;
+
+  function cloneArray(a: any) {
+    var keys = Object.keys(a);
+    var a2 = new Array(keys.length);
+    refs.push(a);
+    refsNew.push(a2);
+    for (var i = 0; i < keys.length; i++) {
+      var k = keys[i] as any;
+      var cur = a[k];
+      checkCloneableOrThrow(cur);
+      if (typeof cur !== "object" || cur === null) {
+        a2[k] = cur;
+      } else if (cur instanceof Date) {
+        a2[k] = new Date(cur);
+      } else if (ArrayBuffer.isView(cur)) {
+        a2[k] = copyBuffer(cur);
+      } else {
+        var index = refs.indexOf(cur);
+        if (index !== -1) {
+          a2[k] = refsNew[index];
+        } else {
+          a2[k] = clone(cur);
+        }
+      }
+    }
+    refs.pop();
+    refsNew.pop();
+    return a2;
+  }
+
+  function clone(o: any) {
+    checkCloneableOrThrow(o);
+    if (typeof o !== "object" || o === null) return o;
+    if (o instanceof Date) return new Date(o);
+    if (Array.isArray(o)) return cloneArray(o);
+    if (o instanceof Map) return new Map(cloneArray(Array.from(o)));
+    if (o instanceof Set) return new Set(cloneArray(Array.from(o)));
+    var o2 = {} as any;
+    refs.push(o);
+    refsNew.push(o2);
+    for (var k in o) {
+      if (Object.hasOwnProperty.call(o, k) === false) continue;
+      var cur = o[k] as any;
+      checkCloneableOrThrow(cur);
+      if (typeof cur !== "object" || cur === null) {
+        o2[k] = cur;
+      } else if (cur instanceof Date) {
+        o2[k] = new Date(cur);
+      } else if (cur instanceof Map) {
+        o2[k] = new Map(cloneArray(Array.from(cur)));
+      } else if (cur instanceof Set) {
+        o2[k] = new Set(cloneArray(Array.from(cur)));
+      } else if (ArrayBuffer.isView(cur)) {
+        o2[k] = copyBuffer(cur);
+      } else {
+        var i = refs.indexOf(cur);
+        if (i !== -1) {
+          o2[k] = refsNew[i];
+        } else {
+          o2[k] = clone(cur);
+        }
+      }
+    }
+    refs.pop();
+    refsNew.pop();
+    return o2;
+  }
+}
+
 function internalEncapsulate(
   val: any,
   outRoot: any,
@@ -262,5 +356,5 @@ export function structuredRevive(val: any): any {
  * Structured clone for IndexedDB.
  */
 export function structuredClone(val: any): any {
-  return cloneDeep(val);
+  return mkDeepClone()(val);
 }
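Taken together, the helpers above replace lodash's cloneDeep (dropped from the dependencies below) with a hand-rolled clone that preserves cycles through the refs/refsNew stacks, copies Date, Map, Set and typed-array values, and rejects anything that is neither a plain object nor a user object. A usage sketch with invented values:

const original: any = { created: new Date(0), tags: new Set(["a", "b"]) };
original.self = original; // a cycle, as in the tests above

const copy = structuredClone(original);
console.log(copy !== original);            // true, a deep copy
console.log(copy.self === copy);           // true, the cycle now points at the copy
console.log(copy.tags instanceof Set);     // true
console.log(copy.created instanceof Date); // true

structuredClone({ f: () => 1 }); // throws DataCloneError, functions are not cloneable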
@@ -129,24 +129,20 @@ importers:
       '@rollup/plugin-commonjs': ^17.1.0
       '@rollup/plugin-json': ^4.1.0
       '@rollup/plugin-node-resolve': ^11.2.0
-      '@types/lodash': ^4.14.178
       '@types/node': ^14.14.22
       ava: ^3.15.0
       esm: ^3.2.25
-      lodash: ^4.17.21
       prettier: ^2.2.1
       rimraf: ^3.0.2
       rollup: ^2.37.1
       tslib: ^2.1.0
       typescript: ^4.1.3
     dependencies:
-      lodash: 4.17.21
       tslib: 2.1.0
     devDependencies:
       '@rollup/plugin-commonjs': 17.1.0_rollup@2.37.1
       '@rollup/plugin-json': 4.1.0_rollup@2.37.1
       '@rollup/plugin-node-resolve': 11.2.0_rollup@2.37.1
-      '@types/lodash': 4.14.178
       '@types/node': 14.14.22
       ava: 3.15.0
       esm: 3.2.25
@@ -10169,10 +10165,6 @@ packages:
     resolution: {integrity: sha1-7ihweulOEdK4J7y+UnC86n8+ce4=}
     dev: true

-  /@types/lodash/4.14.178:
-    resolution: {integrity: sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw==}
-    dev: true
-
   /@types/markdown-to-jsx/6.11.3:
     resolution: {integrity: sha512-30nFYpceM/ZEvhGiqWjm5quLUxNeld0HCzJEXMZZDpq53FPkS85mTwkWtCXzCqq8s5JYLgM5W392a02xn8Bdaw==}
     dependencies:
@@ -11573,7 +11565,7 @@ packages:
   /axios/0.21.4:
     resolution: {integrity: sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==}
     dependencies:
-      follow-redirects: 1.14.5
+      follow-redirects: 1.14.5_debug@4.3.2
     transitivePeerDependencies:
       - debug
     dev: true
@@ -15713,16 +15705,6 @@ packages:
       optional: true
     dev: false

-  /follow-redirects/1.14.5:
-    resolution: {integrity: sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA==}
-    engines: {node: '>=4.0'}
-    peerDependencies:
-      debug: '*'
-    peerDependenciesMeta:
-      debug:
-        optional: true
-    dev: true
-
   /follow-redirects/1.14.5_debug@4.3.2:
     resolution: {integrity: sha512-wtphSXy7d4/OR+MvIFbCVBDzZ5520qV8XfPklSN5QtxuMUJZ+b0Wnst1e1lCDocfzuCkHqj8k0FpZqO+UIaKNA==}
     engines: {node: '>=4.0'}
@@ -19335,6 +19317,7 @@ packages:

   /lodash/4.17.21:
     resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
+    dev: true

   /log-symbols/4.1.0:
     resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==}
@@ -20948,7 +20931,7 @@ packages:
     resolution: {integrity: sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg==}
     engines: {node: '>=6'}
     dependencies:
-      ts-pnp: 1.2.0_typescript@4.4.3
+      ts-pnp: 1.2.0_typescript@4.3.5
    transitivePeerDependencies:
      - typescript
    dev: true
@@ -25024,6 +25007,18 @@ packages:
       tslib: 2.3.1
     dev: true

+  /ts-pnp/1.2.0_typescript@4.3.5:
+    resolution: {integrity: sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw==}
+    engines: {node: '>=6'}
+    peerDependencies:
+      typescript: '*'
+    peerDependenciesMeta:
+      typescript:
+        optional: true
+    dependencies:
+      typescript: 4.3.5
+    dev: true
+
   /ts-pnp/1.2.0_typescript@4.4.3:
     resolution: {integrity: sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw==}
     engines: {node: '>=6'}