WIP
parent
12a9cc24db
commit
bf4f8f8da3
@ -1,13 +1,39 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
const lmdb = require("lmdb");
|
// function prefixSearch(...) {
|
||||||
|
// ...
|
||||||
|
// }
|
||||||
|
|
||||||
function prefixSearch(...) {
|
module.exports = function createLMDBBackend(instance) {
|
||||||
...
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = function createLMDBBackend() {
|
|
||||||
return {
|
return {
|
||||||
|
getKey: function (key) {
|
||||||
|
// return value, or throw if non-existent
|
||||||
|
return instance.get(key);
|
||||||
|
},
|
||||||
|
putKey: function (key, value) {
|
||||||
|
// TODO: compare-and-set in API?
|
||||||
|
// NOTE: returns `false` if `ifVersion` check failed, but we are not currently using that feature
|
||||||
|
return instance.put(key, value);
|
||||||
|
},
|
||||||
|
getKeyRange: function* (lowestKey, lowestInclusive, highestKey, highestInclusive) {
|
||||||
|
// NOTE: Range requests in lmdb are start-inclusive but end-exclusive
|
||||||
|
yield* instance.getRange({ start: lowestKey, end: highestKey })
|
||||||
|
.filter(({ key }) => {
|
||||||
|
if (!lowestInclusive && lowestKey.equals(key)) {
|
||||||
|
return false;
|
||||||
|
} else {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (highestInclusive && instance.doesExist(highestKey)) {
|
||||||
|
yield { key: highestKey, value: instance.get(highestKey) };
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
},
|
||||||
|
runInTransaction: function (callback) {
|
||||||
|
|
||||||
|
}
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
@ -0,0 +1,75 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const sortedBtree = require("sorted-btree").default;
|
||||||
|
|
||||||
|
const { toBigint, toBuffer } = require("../storage-encoder/bigint/buffer");
|
||||||
|
|
||||||
|
// TODO: Implement specific safety behaviours:
|
||||||
|
// - consistent view during range iteration (via shallow clone?)
|
||||||
|
// - consistent view during transactions (via shallow clone?)
|
||||||
|
|
||||||
|
module.exports = function createMemoryBackend(_options) {
|
||||||
|
let store = new sortedBtree();
|
||||||
|
|
||||||
|
return {
|
||||||
|
getKey: function (key) {
|
||||||
|
// return value, or throw if non-existent
|
||||||
|
return store.get(toBigint(key));
|
||||||
|
},
|
||||||
|
putKey: function (key, value) {
|
||||||
|
// TODO: compare-and-set in API?
|
||||||
|
store.set(toBigint(key), value);
|
||||||
|
},
|
||||||
|
getKeyRange: function* (lowestKey, lowestInclusive, highestKey, highestInclusive) {
|
||||||
|
let $lowestKey = toBigint(lowestKey);
|
||||||
|
let $highestKey = toBigint(highestKey);
|
||||||
|
|
||||||
|
// inclusive on both ends! return iterator/stream of {key,value} pairs
|
||||||
|
let firstValue = (lowestInclusive)
|
||||||
|
? store.get($lowestKey)
|
||||||
|
: undefined; // FIXME: Check .has instead
|
||||||
|
|
||||||
|
if (firstValue != null) {
|
||||||
|
yield { key: lowestKey, value: firstValue };
|
||||||
|
}
|
||||||
|
|
||||||
|
let currentKey = $lowestKey;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
let pair = store.nextHigherPair(currentKey);
|
||||||
|
|
||||||
|
if (pair != null) {
|
||||||
|
let [ key, value ] = pair;
|
||||||
|
|
||||||
|
if (
|
||||||
|
(key === $highestKey && !highestInclusive)
|
||||||
|
|| (key > $highestKey)
|
||||||
|
) {
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
currentKey = key;
|
||||||
|
yield { key: toBuffer(key), value: value };
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// We've run out of items
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
runInTransaction: function (callback) {
|
||||||
|
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
|
storeRecord(collection, id)
|
||||||
|
deleteRecord(collection, id)
|
||||||
|
hasRecord(collection, id)
|
||||||
|
fetchRecord(collection, id)
|
||||||
|
fetchRecords(collection, lowestID, highestID)
|
||||||
|
addToIndex(index, id)
|
||||||
|
removeFromIndex(index, id)
|
||||||
|
fetchIndexRecord(index, id)
|
||||||
|
fetchIndexRecords(index, lowestID, highestID)
|
||||||
|
*/
|
@ -0,0 +1,140 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const util = require("util");
|
||||||
|
const lmdb = require("lmdb");
|
||||||
|
const mapObj = require("map-obj");
|
||||||
|
|
||||||
|
const createMemoryBackend = require("./memory");
|
||||||
|
const createLMDBBackend = require("./lmdb");
|
||||||
|
const reduceMigrations = require("../schema/reducer");
|
||||||
|
const createRecordCoder = require("../storage-encoder/record-coder");
|
||||||
|
|
||||||
|
// Render an iterator of { key, value } pairs as a colorized inspection
// string of just the values, for eyeballing range-query results.
function stringifyIterator(iterator) {
	const values = [];

	for (const item of iterator) {
		values.push(item.value);
	}

	return util.inspect(values, { colors: true });
}
|
||||||
|
|
||||||
|
// Wrap a single byte value in a one-byte Buffer, used as a test key below.
function buf(number) {
	return Buffer.of(number);
}
|
||||||
|
|
||||||
|
// let backendChoice = "memory";
|
||||||
|
let backendChoice = "lmdb";
|
||||||
|
|
||||||
|
let dummyMigrations = [
|
||||||
|
{ id: 1, operations: [
|
||||||
|
{ type: "createCollection", name: "users", operations: [
|
||||||
|
{ type: "createField", name: "username", operations: [
|
||||||
|
{ type: "setFieldType", fieldType: "string" },
|
||||||
|
{ type: "setAttribute", attribute: "required", value: true }
|
||||||
|
]},
|
||||||
|
{ type: "createField", name: "passwordHash", operations: [
|
||||||
|
{ type: "setFieldType", fieldType: "string" },
|
||||||
|
{ type: "setAttribute", attribute: "required", value: true }
|
||||||
|
]},
|
||||||
|
{ type: "createField", name: "emailAddress", operations: [
|
||||||
|
{ type: "setFieldType", fieldType: "string" },
|
||||||
|
{ type: "setAttribute", attribute: "required", value: false }
|
||||||
|
]},
|
||||||
|
{ type: "createField", name: "isActive", operations: [
|
||||||
|
{ type: "setFieldType", fieldType: "boolean" },
|
||||||
|
{ type: "setAttribute", attribute: "required", value: true }
|
||||||
|
]},
|
||||||
|
{ type: "createField", name: "registrationDate", operations: [
|
||||||
|
{ type: "setFieldType", fieldType: "date" },
|
||||||
|
{ type: "setAttribute", attribute: "required", value: true },
|
||||||
|
{ type: "setAttribute", attribute: "withTimezone", value: false },
|
||||||
|
]},
|
||||||
|
{ type: "createField", name: "invitesLeft", operations: [
|
||||||
|
{ type: "setFieldType", fieldType: "integer" },
|
||||||
|
{ type: "setAttribute", attribute: "required", value: true },
|
||||||
|
]},
|
||||||
|
]}
|
||||||
|
]},
|
||||||
|
{ id: 2, operations: [
|
||||||
|
{ type: "modifyCollection", name: "users", operations: [
|
||||||
|
{ type: "modifyField", name: "emailAddress", operations: [
|
||||||
|
{ type: "setAttribute", attribute: "required", value: true },
|
||||||
|
]},
|
||||||
|
// FIXME: Disallow no-ops for attribute changes?
|
||||||
|
// { type: "modifyField", name: "isActive", operations: [
|
||||||
|
// { type: "setAttribute", attribute: "required", value: true },
|
||||||
|
// ]},
|
||||||
|
{ type: "modifyField", name: "registrationDate", operations: [
|
||||||
|
{ type: "setAttribute", attribute: "withTimezone", value: true },
|
||||||
|
{ type: "rollbackTo", transformer: (value) => value.toUTC() }
|
||||||
|
]},
|
||||||
|
{ type: "modifyField", name: "invitesLeft", operations: [
|
||||||
|
{ type: "setAttribute", attribute: "signed", value: false },
|
||||||
|
]},
|
||||||
|
{ type: "createField", name: "sendNewsletter", operations: [
|
||||||
|
{ type: "setFieldType", fieldType: "boolean" },
|
||||||
|
{ type: "setAttribute", attribute: "required", value: true }, // FIXME: Enforce a default in this case! Otherwise existing columns would be invalid -- actually this should be handled by 'migration defaults' specifically, without requiring a default for new records
|
||||||
|
// FIXME: The below lazy function is currently getting evaluated at schema reduce time, because of the immutable deep merge. *Really* need to work this into merge-by-template instead to prevent cases like this!
|
||||||
|
{ type: "setAttribute", attribute: "defaultValue", value: () => false }, // FIXME: Always specified as a value-producing function, or also allow literals?
|
||||||
|
]},
|
||||||
|
]}
|
||||||
|
]},
|
||||||
|
];
|
||||||
|
|
||||||
|
let schema = reduceMigrations(dummyMigrations);
|
||||||
|
|
||||||
|
console.dir({schema}, {depth:null});
|
||||||
|
|
||||||
|
// Encode `data` as a record of the named collection, using that
// collection's *current* (post-migration) schema.
function createRecord(collectionName, data) {
	let collectionSchema = schema.collections[collectionName];

	// Keep only the current schema of each field, not the transforms
	let fields = mapObj(collectionSchema.fields, (key, value) => [ key, value.schema ]);

	return createRecordCoder(fields).encode(data);
}
|
||||||
|
|
||||||
|
// console.log(schema);
// MARKER: createRecord takes a schema *array*, because fields need to have a well-defined order for consistent encoding. Need to convert from object format to array format, and also update recordCoder so that it can deal with the new internal schema representation format (eg. no longer a nested `attributes` object)

// Smoke-test record encoding against the reduced schema above.
console.log(createRecord("users", {
	username: "joepie91",
	passwordHash: "foo",
	emailAddress: "admin@cryto.net",
	isActive: true,
	registrationDate: new Date(),
	invitesLeft: 10
}));

// NOTE(review): early module-level return — everything below this line
// (the backend smoke test) is currently disabled.
return;
|
||||||
|
|
||||||
|
// Backend smoke test: stores ten keyed values through the selected backend,
// then exercises single-key reads and all four inclusivity combinations of
// getKeyRange. Currently unreachable because of the `return` above.
(async function() {
	// FIXME: match-value
	let db = lmdb.open({ keyEncoding: "binary" });
	let backend = (backendChoice === "lmdb")
		? createLMDBBackend(db)
		: createMemoryBackend();

	await backend.putKey(buf(1), "one");
	await backend.putKey(buf(2), "two");
	await backend.putKey(buf(3), "three");
	await backend.putKey(buf(4), "four");
	await backend.putKey(buf(5), "five");
	await backend.putKey(buf(6), "six");
	await backend.putKey(buf(7), "seven");
	await backend.putKey(buf(8), "eight");
	await backend.putKey(buf(9), "nine");
	await backend.putKey(buf(10), "ten");

	console.log(backend.getKey(buf(4)));

	// Raw lmdb range queries, for comparison with the backend's results
	console.log(Array.from(db.getRange()));
	console.log(Array.from(db.getRange({ start: buf(2), end: buf(6) })));

	// All four inclusive/exclusive combinations over the [2, 6] key range
	console.log(stringifyIterator(backend.getKeyRange(buf(2), true, buf(6), true)));
	console.log(stringifyIterator(backend.getKeyRange(buf(2), false, buf(6), true)));
	console.log(stringifyIterator(backend.getKeyRange(buf(2), true, buf(6), false)));
	console.log(stringifyIterator(backend.getKeyRange(buf(2), false, buf(6), false)));
})();
|
@ -0,0 +1,12 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
// FIXME: Ensure that this separator is escaped everywhere else
|
||||||
|
const SEPARATOR = Buffer.from(":");
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
objectID: (collection, id) => Buffer.concat([
|
||||||
|
Buffer.from(collection),
|
||||||
|
SEPARATOR,
|
||||||
|
id
|
||||||
|
])
|
||||||
|
};
|
Loading…
Reference in New Issue