Sven Slootweg 3 months ago
parent
commit
bf4f8f8da3
  1. 4
      package.json
  2. 40
      src/backend/lmdb.js
  3. 75
      src/backend/memory.js
  4. 140
      src/backend/test-memory.js
  5. 12
      src/construct-key.js
  6. 2
      src/packages/immutable-deep-merge/index.js
  7. 411
      src/schema/reducer.js
  8. 12
      src/schema/rules.js
  9. 7
      src/util/compose.js
  10. 948
      yarn.lock

4
package.json

@@ -32,10 +32,12 @@
"cartesian-product": "^2.1.2",
"default-value": "^1.0.0",
"fix-esm": "^1.0.1",
"lmdb": "^1.6.6",
"lmdb": "^2.6.0-alpha6",
"map-obj": "^4",
"match-value": "^1.1.0",
"merge-by-template": "^0.1.3",
"seed-random": "^2.2.0",
"sorted-btree": "^1.8.0",
"split-filter-n": "^1.1.3",
"syncpipe": "^1.0.0",
"time-call": "^0.1.0",

40
src/backend/lmdb.js

@@ -1,13 +1,39 @@
"use strict";
const lmdb = require("lmdb");
// function prefixSearch(...) {
// ...
// }
function prefixSearch(...) {
...
}
module.exports = function createLMDBBackend() {
// Thin key/value backend adapter over an lmdb database handle.
// NOTE(review): `instance` is assumed to be an open `lmdb` database
// (it must provide get/put/getRange/doesExist) -- confirm against callers.
// Keys appear to be expected as Buffers (see `.equals` below) -- TODO confirm.
module.exports = function createLMDBBackend(instance) {
	return {
		getKey: function (key) {
			// return value, or throw if non-existent
			return instance.get(key);
		},
		putKey: function (key, value) {
			// TODO: compare-and-set in API?
			// NOTE: returns `false` if `ifVersion` check failed, but we are not currently using that feature
			return instance.put(key, value);
		},
		getKeyRange: function* (lowestKey, lowestInclusive, highestKey, highestInclusive) {
			// NOTE: Range requests in lmdb are start-inclusive but end-exclusive
			// Because the start is inclusive by default, we filter the exact lower
			// bound back out when the caller asked for an exclusive lower bound.
			// NOTE(review): `lowestKey.equals(key)` requires Buffer keys.
			yield* instance.getRange({ start: lowestKey, end: highestKey })
				.filter(({ key }) => {
					if (!lowestInclusive && lowestKey.equals(key)) {
						return false;
					} else {
						return true;
					}
				});
			// Compensate for lmdb's end-exclusivity: when the caller wants an
			// inclusive upper bound and that key exists, emit it last (the range
			// above never produced it, so this cannot duplicate an entry).
			if (highestInclusive && instance.doesExist(highestKey)) {
				yield { key: highestKey, value: instance.get(highestKey) };
			}
			return;
		},
		runInTransaction: function (callback) {
			// NOTE(review): unimplemented stub -- `callback` is currently ignored.
		}
	};
};

75
src/backend/memory.js

@@ -0,0 +1,75 @@
"use strict";
const sortedBtree = require("sorted-btree").default;
const { toBigint, toBuffer } = require("../storage-encoder/bigint/buffer");
// TODO: Implement specific safety behaviours:
// - consistent view during range iteration (via shallow clone?)
// - consistent view during transactions (via shallow clone?)
module.exports = function createMemoryBackend(_options) {
let store = new sortedBtree();
return {
getKey: function (key) {
// return value, or throw if non-existent
return store.get(toBigint(key));
},
putKey: function (key, value) {
// TODO: compare-and-set in API?
store.set(toBigint(key), value);
},
getKeyRange: function* (lowestKey, lowestInclusive, highestKey, highestInclusive) {
let $lowestKey = toBigint(lowestKey);
let $highestKey = toBigint(highestKey);
// inclusive on both ends! return iterator/stream of {key,value} pairs
let firstValue = (lowestInclusive)
? store.get($lowestKey)
: undefined; // FIXME: Check .has instead
if (firstValue != null) {
yield { key: lowestKey, value: firstValue };
}
let currentKey = $lowestKey;
while (true) {
let pair = store.nextHigherPair(currentKey);
if (pair != null) {
let [ key, value ] = pair;
if (
(key === $highestKey && !highestInclusive)
|| (key > $highestKey)
) {
break;
} else {
currentKey = key;
yield { key: toBuffer(key), value: value };
}
} else {
// We've run out of items
break;
}
}
},
runInTransaction: function (callback) {
}
};
};
/*
storeRecord(collection, id)
deleteRecord(collection, id)
hasRecord(collection, id)
fetchRecord(collection, id)
fetchRecords(collection, lowestID, highestID)
addToIndex(index, id)
removeFromIndex(index, id)
fetchIndexRecord(index, id)
fetchIndexRecords(index, lowestID, highestID)
*/

140
src/backend/test-memory.js

@@ -0,0 +1,140 @@
"use strict";
const util = require("util");
const lmdb = require("lmdb");
const mapObj = require("map-obj");
const createMemoryBackend = require("./memory");
const createLMDBBackend = require("./lmdb");
const reduceMigrations = require("../schema/reducer");
const createRecordCoder = require("../storage-encoder/record-coder");
function stringifyIterator(iterator) {
	// Drains the iterator and pretty-prints only the `value` of every
	// { key, value } pair it produced, for eyeballing range-query output.
	const values = [];
	for (const item of iterator) {
		values.push(item.value);
	}
	return util.inspect(values, { colors: true });
}
function buf(number) {
	// Wraps a single byte value in a one-byte Buffer; used as a tiny test key.
	const bytes = new Uint8Array([ number ]);
	return Buffer.from(bytes);
}
// Which backend the (currently dead, see the `return;` further down) smoke
// test below should exercise.
// let backendChoice = "memory";
let backendChoice = "lmdb";

// Two hand-written migrations for a `users` collection, used as fixture input
// for the schema reducer: migration 1 creates the collection, migration 2
// tightens/changes several field attributes and adds a new field.
let dummyMigrations = [
	{ id: 1, operations: [
		{ type: "createCollection", name: "users", operations: [
			{ type: "createField", name: "username", operations: [
				{ type: "setFieldType", fieldType: "string" },
				{ type: "setAttribute", attribute: "required", value: true }
			]},
			{ type: "createField", name: "passwordHash", operations: [
				{ type: "setFieldType", fieldType: "string" },
				{ type: "setAttribute", attribute: "required", value: true }
			]},
			{ type: "createField", name: "emailAddress", operations: [
				{ type: "setFieldType", fieldType: "string" },
				{ type: "setAttribute", attribute: "required", value: false }
			]},
			{ type: "createField", name: "isActive", operations: [
				{ type: "setFieldType", fieldType: "boolean" },
				{ type: "setAttribute", attribute: "required", value: true }
			]},
			{ type: "createField", name: "registrationDate", operations: [
				{ type: "setFieldType", fieldType: "date" },
				{ type: "setAttribute", attribute: "required", value: true },
				{ type: "setAttribute", attribute: "withTimezone", value: false },
			]},
			{ type: "createField", name: "invitesLeft", operations: [
				{ type: "setFieldType", fieldType: "integer" },
				{ type: "setAttribute", attribute: "required", value: true },
			]},
		]}
	]},
	{ id: 2, operations: [
		{ type: "modifyCollection", name: "users", operations: [
			{ type: "modifyField", name: "emailAddress", operations: [
				{ type: "setAttribute", attribute: "required", value: true },
			]},
			// FIXME: Disallow no-ops for attribute changes?
			// { type: "modifyField", name: "isActive", operations: [
			// 	{ type: "setAttribute", attribute: "required", value: true },
			// ]},
			{ type: "modifyField", name: "registrationDate", operations: [
				{ type: "setAttribute", attribute: "withTimezone", value: true },
				// NOTE(review): `.toUTC()` is not a standard Date method --
				// presumably these values are Luxon DateTimes; verify.
				{ type: "rollbackTo", transformer: (value) => value.toUTC() }
			]},
			{ type: "modifyField", name: "invitesLeft", operations: [
				{ type: "setAttribute", attribute: "signed", value: false },
			]},
			{ type: "createField", name: "sendNewsletter", operations: [
				{ type: "setFieldType", fieldType: "boolean" },
				{ type: "setAttribute", attribute: "required", value: true }, // FIXME: Enforce a default in this case! Otherwise existing columns would be invalid -- actually this should be handled by 'migration defaults' specifically, without requiring a default for new records
				// FIXME: The below lazy function is currently getting evaluated at schema reduce time, because of the immutable deep merge. *Really* need to work this into merge-by-template instead to prevent cases like this!
				{ type: "setAttribute", attribute: "defaultValue", value: () => false }, // FIXME: Always specified as a value-producing function, or also allow literals?
			]},
		]}
	]},
];

// Reduce the migration list to the final schema shape and dump it for inspection.
let schema = reduceMigrations(dummyMigrations);
console.dir({schema}, {depth:null});
function createRecord(collectionName, data) {
	// Encodes `data` according to the *current* schema of the named collection.
	// Each field entry in the reduced schema carries transform bookkeeping
	// alongside its schema; only the `.schema` part is relevant for encoding,
	// so it is extracted before the field map is handed to the record coder.
	const collectionSchema = schema.collections[collectionName];
	const fields = mapObj(collectionSchema.fields, (name, field) => [ name, field.schema ]);

	return createRecordCoder(fields).encode(data);
}
// console.log(schema);
// MARKER: createRecord takes a schema *array*, because fields need to have a well-defined order for consistent encoding. Need to convert from object format to array format, and also update recordCoder so that it can deal with the new internal schema representation format (eg. no longer a nested `attributes` object)
// Encode one sample record against the reduced schema and print the result.
console.log(createRecord("users", {
	username: "joepie91",
	passwordHash: "foo",
	emailAddress: "admin@cryto.net",
	isActive: true,
	registrationDate: new Date(),
	invitesLeft: 10
}));

// NOTE(review): this top-level `return` is only legal because CommonJS wraps
// the module in a function; it deliberately short-circuits the script, so the
// backend smoke test below is currently dead code.
return;

(async function() {
	// FIXME: match-value
	let db = lmdb.open({ keyEncoding: "binary" });
	// Pick the backend under test via the `backendChoice` toggle near the top.
	let backend = (backendChoice === "lmdb")
		? createLMDBBackend(db)
		: createMemoryBackend();

	// Seed ten single-byte keys so the range queries below have data to walk.
	await backend.putKey(buf(1), "one");
	await backend.putKey(buf(2), "two");
	await backend.putKey(buf(3), "three");
	await backend.putKey(buf(4), "four");
	await backend.putKey(buf(5), "five");
	await backend.putKey(buf(6), "six");
	await backend.putKey(buf(7), "seven");
	await backend.putKey(buf(8), "eight");
	await backend.putKey(buf(9), "nine");
	await backend.putKey(buf(10), "ten");

	console.log(backend.getKey(buf(4)));
	console.log(Array.from(db.getRange()));
	console.log(Array.from(db.getRange({ start: buf(2), end: buf(6) })));

	// Exercise every inclusive/exclusive combination of the [2, 6] range.
	console.log(stringifyIterator(backend.getKeyRange(buf(2), true, buf(6), true)));
	console.log(stringifyIterator(backend.getKeyRange(buf(2), false, buf(6), true)));
	console.log(stringifyIterator(backend.getKeyRange(buf(2), true, buf(6), false)));
	console.log(stringifyIterator(backend.getKeyRange(buf(2), false, buf(6), false)));
})();

12
src/construct-key.js

@@ -0,0 +1,12 @@
"use strict";
// FIXME: Ensure that this separator is escaped everywhere else
const SEPARATOR = Buffer.from(":");
module.exports = {
objectID: (collection, id) => Buffer.concat([
Buffer.from(collection),
SEPARATOR,
id
])
};

2
src/packages/immutable-deep-merge/index.js

@@ -22,7 +22,7 @@ module.exports = function immutableDeepMerge(object1, object2) {
? value(originalValue)
: value;
if (typeof normalizedValue === "object" && normalizedValue !== null) {
if (typeof normalizedValue === "object" && !Array.isArray(normalizedValue) && normalizedValue !== null) {
// NOTE: We default to an empty object for the original value because from the perspective of a deep-merge, any nested paths required by the new input that don't exist in the original input should be imagined into existence.
transformedValue = immutableDeepMerge(originalValue ?? {}, normalizedValue);
} else {

411
src/schema/reducer.js

@@ -5,7 +5,9 @@ const assert = require("assert");
const matchValue = require("match-value");
const splitFilterN = require("split-filter-n");
const unreachable = require("@joepie91/unreachable")("zapdb");
const mergeByTemplate = require("merge-by-template");
const immutableDeepMerge = require("../packages/immutable-deep-merge");
const util = require("util");
const rules = require("./rules");
const computeTransform = require("./compute-transform");
@@ -15,35 +17,92 @@ const compose = require("../util/compose");
// FIXME: replace asserts with proper checks and error messages
// TODO: Find a way to roll this into merge-by-template somehow? The main difference is specifying dynamic transforms at rule definition time (and needing to use meta-objects in the mergeable) vs. specifying dynamic transforms at merge time directly
// TODO: Add API for "set this object literally, no merge"
// TODO: Add API for "set this object literally, no merge" -- is that actually necessary, can't we just have a transform function that only returns the new value directly?
// FIXME: Find a way to support arrays? Particularly objects *within* arrays, which would also need to be merged recursively...
// FIXME: requiresMigrationDefault and requiredAttributes validation, as well as verifying that a type has actually been set by the end of a createField operation
// FIXME: Track exactly which data would be lost upon a rollback so that the user can be asked for confirmation first
function checkTransforms(operations) {
let byType = splitFilterN(operations, null, (operation) => operation.type);
function createTransformComputer() {
let automaticTransformers = [];
let requiredTransformers = [];
if (byType.transformTo != null && byType.transformTo.length > 1) {
// FIXME: Error code
throw new Error(`Only one transformTo can be specified per modified field`);
}
if (byType.rollbackTo != null && byType.rollbackTo.length > 1) {
// FIXME: Error code
throw new Error(`Only one rollbackTo can be specified per modified field`);
}
if (byType.rollbackTo != null && byType.forbidRollback != null) {
// FIXME: Error code
throw new Error(`Cannot specify both a rollbackTo and an unsafeForbidRollback`);
}
return {
changeType: function (oldType, newType) {
if (oldType == null || newType == null) {
// We're setting a type for this field for the first time (which should only happen during field creation). This also applies to the inverse computation for rollbacks
return;
} else {
let automatic = rules.types[oldType].losslessConversionTo[newType];
if (automatic != null) {
automaticTransformers.push(automatic);
} else {
requiredTransformers.push({ type: "type", oldType, newType });
}
}
},
changeAttribute: function (attribute, oldValue, newValue) {
if (oldValue == null || newValue == null) {
// We're setting this attribute on this field for the first time (which should only happen during field creation). This also applies to inverse computation for rollbacks.
// NOTE: Even if a field is not required, it should always be initialized during field creation, using an implicit operation setting the default, so a legitimate revert to `undefined` should never be possible.
// FIXME: How to deal with this when a new attribute is introduced in a new schema DSL version? Should we just pretend that the old one always existed, by updating the old DSL implementation to insert it implicitly as a default? Probably should.
return;
} else {
let canBeAutomatic = rules.attributes[attribute].isLossless(oldValue, newValue);
let hasRollbackTransform = (byType.rollbackTo != null);
let hasRollbackProhibition = (byType.forbidRollback != null);
if (canBeAutomatic) {
automaticTransformers.push(rules.attributes[attribute].losslessTransformer);
} else {
requiredTransformers.push({ type: "attribute", attribute, oldValue, newValue });
}
}
},
getResults: function (manualTransformer, isForward, fieldName) {
// NOTE: We disallow transformTo/rollbackTo when they are not required; if the user wishes to bulk-transform values, they should specify a changeRecords operation instead (which should probably require the user to specify both the forward and backward transform?). Otherwise, we cannot implement "maybe you forgot a rollbackTo" errors, because that's only actually an error when a transform isn't *required*, and so if a user 'overreaches' in their type transform to also do a value transform we can't detect missing corresponding rollbackTo logic.
// NOTE: There are deliberately duplicate conditional clauses in here to improve readability!
if (requiredTransformers.length === 0 && automaticTransformers.length === 0) {
if (manualTransformer == null) {
// Identity function; no changes were made that affect the value itself
return (value) => value;
} else {
// FIXME: Better error message
let operationName = (isForward) ? "transformTo" : "rollbackTo";
throw new Error(`A ${operationName} operation was specified, but no other schema changes require one. Maybe you meant to use updateRecords instead?`);
}
} else if (requiredTransformers.length === 0 && automaticTransformers.length > 0) {
return compose(automaticTransformers);
} else if (requiredTransformers.length > 0) {
if (manualTransformer == null) {
// FIXME: Have some sort of error templating abstracted out instead
let causes = requiredTransformers
.map((cause) => {
if (cause.type === "type") {
return ` - Field type changed from '${cause.oldType}' to '${cause.newType}'`;
} else if (cause.type === "attribute") {
let from = (isForward) ? cause.oldValue : cause.newValue;
let to = (isForward) ? cause.newValue : cause.oldValue;
return ` - Attribute '${cause.attribute}' changed from '${util.inspect(from)}' to '${util.inspect(to)}'`;
} else {
throw unreachable("Unrecognized cause type");
}
})
.join("\n");
let errorMessage = (isForward)
? `One or more schema changes for '${fieldName}' cannot be applied automatically, because existing data would lose precision. You need to specify a transformTo operation manually.`
: `One or more schema changes for '${fieldName}' cannot be applied automatically, because rolling back the migration would cause data to lose precision. You need to specify a rollbackTo operation manually.`
return {
hasTransform: (byType.transformTo != null),
hasRollback: (hasRollbackTransform || hasRollbackProhibition),
hasRollbackTransform: hasRollbackTransform,
hasRollbackProhibition: hasRollbackProhibition
// FIXME: Better error message
throw new Error(`${errorMessage}\n\nCaused by:\n${causes}`);
} else {
// TODO: Is this the correct order, always letting the manual transformer act on the original value rather than the one post automatic transforms? Probably is, if we want automatic transforms to be transparent to the user (and not produce a leaky abstraction)
return compose([ manualTransformer, ... automaticTransformers ]);
}
} else {
throw unreachable("Impossible condition");
}
}
};
}
@@ -63,193 +122,98 @@ function changeType(schema, newType) {
return newSchema;
} else {
throw new Error(`Tried to set field type to '${operation.fieldType}', but that is already the type`);
throw new Error(`Tried to set field type to '${newType}', but that is already the type`);
}
}
function applyFieldOperations(currentField = {}, operations) {
// Things that are specific to this migration
let state = {
schema: { ... currentField }, // Clone for local mutation
forwardTransform: null,
backwardTransform: null,
transformsRequired: false,
rollbackForbidden: false,
changedAttributes: []
};
// FIXME: Track destructive forward migrations *and* rollbacks, and outside of dev mode require the user to confirm with eg. an --allow-destructive-rollbacks that this is okay to apply
function applyFieldOperations(fieldName, currentField = {}, operations) {
// { schema, forwardTransform, backwardTransform, transformsRequired, rollbackForbidden }
let schema = currentField.schema ?? {};
let lastForwardTransforms = currentField.forwardTransforms ?? [];
let lastBackwardTransforms = currentField.backwardTransforms ?? [];
let rollbackForbidden = currentField.rollbackForbidden ?? false; // FIXME: Actually use this as a safeguard in migrations
let forwardTransformComputer = createTransformComputer();
let backwardTransformComputer = createTransformComputer();
let explicitForwardTransform = null;
let explicitBackwardTransform = null;
for (let operation of operations) {
matchValue(operation.type, {
setFieldType: () => {
// NOTE: This is separated out into a function because a bunch of complexity is needed for determining which attributes can be kept
state.schema = changeType(state.schema, operation.fieldType);
state.transformsRequired = true;
forwardTransformComputer.changeType(schema.type, operation.fieldType);
backwardTransformComputer.changeType(operation.fieldType, schema.type);
// NOTE: The logic for this is separated out into its own function because a bunch of complexity is needed for determining which attributes can be kept
schema = changeType(schema, operation.fieldType);
},
setAttribute: () => {
if (state.schema[operation.attribute] !== operation.value) {
state.changedAttributes.push(operation.attribute);
state.schema[operation.attribute] = operation.value;
state.transformsRequired = true;
if (schema[operation.attribute] !== operation.value) {
let currentValue = schema[operation.attribute];
forwardTransformComputer.changeAttribute(operation.attribute, currentValue, operation.value);
backwardTransformComputer.changeAttribute(operation.attribute, operation.value, currentValue);
schema = { ... schema, [operation.attribute]: operation.value };
} else {
// FIXME: Error quality
throw new Error(`Tried to change '${operation.attribute}' attribute to '${operation.value}', but it's already set to that`);
}
},
transformTo: () => {
if (state.forwardTransform == null) {
state.forwardTransform = operation.transformer;
if (explicitForwardTransform == null) {
explicitForwardTransform = operation.transformer;
} else {
// FIXME: Error quality
throw new Error(`You can only specify one transformTo per field per migration`);
}
},
rollbackTo: () => {
if (state.backwardTransform == null) {
state.backwardTransform = operation.transformer;
if (explicitBackwardTransform == null) {
explicitBackwardTransform = operation.transformer;
} else {
// FIXME: Error quality
throw new Error(`You can only specify one rollbackTo per field per migration`);
}
},
forbidRollback: () => {
state.rollbackForbidden = true;
},
rollbackForbidden = true;
}
// TODO: rest of operations
});
}
function createTransformComputer() {
let automaticTransformers = [];
let requiredTransformers = [];
let forwardTransform = forwardTransformComputer.getResults(explicitForwardTransform, true, fieldName);
let backwardTransform = backwardTransformComputer.getResults(explicitBackwardTransform, false, fieldName);
return {
changeType: function (oldType, newType) {
let automatic = rules.types[oldType].losslessConversionTo[newType];
if (automatic != null) {
automaticTransformers.push(automatic);
} else {
requiredTransformers.push({ type: "type", oldType, newType });
}
},
changeAttribute: function (attribute, oldValue, newValue) {
let canBeAutomatic = rules.attributes[attribute].isLossless(oldValue, newValue);
if (canBeAutomatic) {
automaticTransformers.push(rules.attributes[attribute].losslessTransformer);
} else {
requiredTransformers.push({ type: "attribute", attribute, oldValue, newValue });
}
},
getResults: function (manualTransformer, operationName) {
// NOTE: There are deliberately duplicate conditional clauses in here to improve readability!
if (requiredTransformers.length === 0 && automaticTransformers.length === 0) {
if (manualTransformer == null) {
// Identity function; no changes were made that affect the value itself
return (value) => value;
} else {
// FIXME: Better error message
throw new Error(`A ${operationName} operation was specified, but no other schema changes require one. Maybe you meant to use updateRecords instead?`);
}
} else if (requiredTransformers.length === 0 && automaticTransformers.length > 0) {
return compose(automaticTransformers);
} else if (requiredTransformers.length > 0) {
// FIXME: Better error message
throw new Error(`One or more schema changes can't be automatically applied, because a lossless automatic conversion of existing values is not possible; you need to specify a ${operationName} operation manually`);
} else {
throw unreachable("Impossible condition");
}
}
};
}
// NOTE: We disallow transformTo/rollbackTo when they are not required; if the user wishes to bulk-transform values, they should specify a changeRecords operation instead. Otherwise, we cannot implement "maybe you forgot a rollbackTo" errors, because that's only actually an error when a transform isn't *required*, and so if a user 'overreaches' in their type transform to also do a value transform we can't detect missing corresponding rollbackTo logic.
if (transformsRequired) {
let forwardTransformers = { automatic: [], required: [] };
let backwardTransformers = { automatic: [], required: [] };
function addTransformer(collection, automaticTransformer, marker) {
if (automaticTransformer != null) {
collection.automatic.push(automaticTransformer);
} else {
collection.required.push(marker);
}
}
let oldType = currentField.type;
let newType = state.schema.type;
let transformers = computeTransform.type(oldType, newType);
addTransformer(forwardTransformers, transformers.forward, { type: "type" });
addTransformer(backwardTransformers, transformers.backward, { type: "type" });
// FIXME: Currently this implementation assumes that *all* possible attributes are required, and it doesn't deal with cases where the attribute is currently unset. That needs to be changed, especially because new attributes can be changed in later versions of the schema builder, which older migrations won't be using.
// TODO/QUESTION: Maybe all attributes should just be given a default instead of being required? Otherwise over time there'll be a mix of required and optional attributes, the requiredness being determined solely by when the attribute was added to the query builder...
for (let attribute of state.changedAttributes) {
let oldValue = currentField[attribute];
let newValue = state.schema[attribute];
let transformers = computeTransform.attribute(attribute, oldValue, newValue);
addTransformer(forwardTransformers, transformers.forward, { type: "attribute", attribute: attribute });
addTransformer(backwardTransformers, transformers.backward, { type: "attribute", attribute: attribute });
}
if (forwardTransformers.required.length > 0 && state.forwardTransform == null) {
// FIXME: Error quality, list the specific reasons
throw new Error(`One or more schema changes require you to specify a transformTo operation`);
} else {
state.forwardTransform = compose(forwardTransformers.automatic);
}
if (backwardTransformers.required.length > 0 && state.backwardTransform == null) {
// FIXME: Error quality, list the specific reasons
throw new Error(`One or more schema changes require you to specify a rollbackTo operation`);
} else {
state.backwardTransform = compose(backwardTransformers.automatic);
}
} else {
if (state.forwardTransform != null || state.backwardTransform != null) {
// FIXME: Error quality and in-depth explanation
throw new Error(`You cannot specify a transformTo or rollbackTo operation unless a field type change requires it. Maybe you meant to use changeRecords instead?`);
// FIXME: modifyRecords instead of changeRecords? For consistency with other APIs
}
}
return state;
return {
schema: schema,
forwardTransforms: lastForwardTransforms.concat([ forwardTransform ]),
backwardTransforms: lastBackwardTransforms.concat([ backwardTransform ]),
rollbackForbidden: rollbackForbidden,
};
}
function tableOperationReducer(table, operation) {
function collectionOperationReducer(collection, operation) {
return matchValue(operation.type, {
createField: () => immutableDeepMerge(table, {
createField: () => immutableDeepMerge(collection, {
fields: {
[operation.name]: (field) => {
assert(field === undefined);
let { type, name, ... props } = operation;
return props;
assert(field === undefined); // FIXME: Produce a useful error here
return applyFieldOperations(operation.name, {}, operation.operations);
}
}
}),
setFieldAttributes: () => immutableDeepMerge(table, {
modifyField: () => immutableDeepMerge(collection, {
fields: {
[operation.name]: (field) => {
assert(field !== undefined);
let { type, name, ... props } = operation;
// TODO: Improve readability here
return {
... field,
... props,
attributes: {
... field.attributes,
... props.attributes
}
};
assert(field !== undefined); // FIXME: Produce a useful error here
return applyFieldOperations(operation.name, field, operation.operations);
}
}
}),
addIndex: () => immutableDeepMerge(table, {
addIndex: () => immutableDeepMerge(collection, {
indexes: {
[operation.name]: operation.definition
}
@@ -260,18 +224,18 @@ function tableOperationReducer(table, operation) {
function schemaOperationReducer(schema, operation) {
return matchValue(operation.type, {
createCollection: () => immutableDeepMerge(schema, {
tables: {
[operation.name]: (table) => {
assert(table === undefined);
return operation.operations.reduce(tableOperationReducer, {});
collections: {
[operation.name]: (collection) => {
assert(collection === undefined); // FIXME: Produce a useful error here
return operation.operations.reduce(collectionOperationReducer, {});
}
}
}),
modifyCollection: () => immutableDeepMerge(schema, {
tables: {
[operation.name]: (table) => {
assert(table !== undefined);
return operation.operations.reduce(tableOperationReducer, table);
collections: {
[operation.name]: (collection) => {
assert(collection !== undefined); // FIXME: Produce a useful error here
return operation.operations.reduce(collectionOperationReducer, collection);
}
}
}),
@@ -285,8 +249,77 @@ module.exports = function reduceMigrations(migrationList, initial = {}) {
return migrationList.reduce((lastSchema, migration) => {
return migration.operations.reduce(schemaOperationReducer, lastSchema);
}, initial);
/* MARKER: Turn the resulting migration tree into 3 different views:
- final schema form, for encoding new entries
- structural changes, eg. newly created collections that need to be registered in some way before records are even generated for them
- record transformations, ie. a definition of how records in older schema versions need to have their values/structure transformed to conform to the new schema
Also think about how to deal with index roll-over, eg. a new runtime-generated index because of changing tzdb data
*/
};
let mergeMigrations = mergeByTemplate.createMerger({
collections: mergeByTemplate.anyProperty({
fields: mergeByTemplate.anyProperty((a, b) => {
})
})
});
/*
{
collections: {
users: {
fields: {
username: {
schema: { type: 'string', required: true },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
},
passwordHash: {
schema: { type: 'string', required: true },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
},
emailAddress: {
schema: { type: 'string', required: true },
forwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
rollbackForbidden: false
},
isActive: {
schema: { type: 'boolean', required: true },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
},
registrationDate: {
schema: { type: 'date', required: true, withTimezone: true },
forwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
rollbackForbidden: false
},
invitesLeft: {
schema: { type: 'integer', required: true, signed: false },
forwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
rollbackForbidden: false
},
sendNewsletter: {
schema: { type: 'boolean', required: true, defaultValue: false },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
}
}
}
}
}
*/
// let dummyMigrations = [
// { id: 1, operations: [
// { type: "createCollection", name: "users", operations: [
@@ -312,28 +345,28 @@ let dummyMigrations = [
{ id: 1, operations: [
{ type: "createCollection", name: "users", operations: [
{ type: "createField", name: "username", operations: [
{ type: "changeType", fieldType: "string" },
{ type: "setFieldType", fieldType: "string" },
{ type: "setAttribute", attribute: "required", value: true }
]},
{ type: "createField", name: "passwordHash", operations: [
{ type: "changeType", fieldType: "string" },
{ type: "setFieldType", fieldType: "string" },
{ type: "setAttribute", attribute: "required", value: true }
]},
{ type: "createField", name: "emailAddress", operations: [
{ type: "changeType", fieldType: "string" },
{ type: "setFieldType", fieldType: "string" },
{ type: "setAttribute", attribute: "required", value: false }
]},
{ type: "createField", name: "isActive", operations: [
{ type: "changeType", fieldType: "boolean" },
{ type: "setFieldType", fieldType: "boolean" },
{ type: "setAttribute", attribute: "required", value: true }
]},
{ type: "createField", name: "registrationDate", operations: [
{ type: "changeType", fieldType: "date" },
{ type: "setFieldType", fieldType: "date" },
{ type: "setAttribute", attribute: "required", value: true },
{ type: "setAttribute", attribute: "withTimezone", value: false },
]},
{ type: "createField", name: "invitesLeft", operations: [
{ type: "changeType", fieldType: "integer" },
{ type: "setFieldType", fieldType: "integer" },
{ type: "setAttribute", attribute: "required", value: true },
]},
]}
@@ -341,25 +374,27 @@ let dummyMigrations = [
{ id: 2, operations: [
{ type: "modifyCollection", name: "users", operations: [
{ type: "modifyField", name: "emailAddress", operations: [
{ type: "setAttribute", attribute: "required", value: false },
]},
// FIXME: Disallow no-ops for attribute changes?
{ type: "modifyField", name: "isActive", operations: [
{ type: "setAttribute", attribute: "required", value: true },
]},
// FIXME: Disallow no-ops for attribute changes?
// { type: "modifyField", name: "isActive", operations: [
// { type: "setAttribute", attribute: "required", value: true },
// ]},
{ type: "modifyField", name: "registrationDate", operations: [
{ type: "setAttribute", attribute: "withTimezone", value: true },
{ type: "rollbackTo", transformer: (value) => value.toUTC() }
]},
{ type: "modifyField", name: "invitesLeft", operations: [
{ type: "setAttribute", attribute: "signed", value: false },
]},
{ type: "createField", name: "sendNewsletter", operations: [
{ type: "changeType", fieldType: "boolean" },
{ type: "setAttribute", attribute: "required", value: true }, // FIXME: Enforce a default in this case! Otherwise existing columns would be invalid
{ type: "setDefault", value: () => false }, // FIXME: Always specified as a value-producing function, or also allow literals?
{ type: "setFieldType", fieldType: "boolean" },
{ type: "setAttribute", attribute: "required", value: true }, // FIXME: Enforce a default in this case! Otherwise existing columns would be invalid -- actually this should be handled by 'migration defaults' specifically, without requiring a default for new records
// FIXME: The below lazy function is currently getting evaluated at schema reduce time, because of the immutable deep merge. *Really* need to work this into merge-by-template instead to prevent cases like this!
{ type: "setAttribute", attribute: "defaultValue", value: () => false }, // FIXME: Always specified as a value-producing function, or also allow literals?
]},
]}
]},
];
// console.dir(module.exports(dummyMigrations), { depth: null });
console.dir(module.exports(dummyMigrations), { depth: null });

12
src/schema/rules.js

@ -2,6 +2,9 @@
const lookupTimezoneName = require("../lookup-timezone-name");
// CAUTION: Once a type is defined here and that version of the code is released, no new requiredAttributes can be added to that type! Otherwise it would be possible for an old migration (from before that change) to become retroactively invalid upon an update of zapdb, just because it was written against an older ruleset. This cannot be fixed by statefully tracking what migrations were *once* considered 'valid', either; all state is kept in the database instance of a specific deployment, which means that it would only be considered valid for deployments created prior to the zapdb upgrade - making database deployment non-deterministic, as a given migration would be considered valid on one system but invalid on another. Bottom line: we can never retroactively invalidate existing migrations.
// TODO: Consider using schema API versions as a workaround for this, and changing the rules format to specify since which version an attribute is required, providing a default for the alternate case. Question is whether by this point it's really useful to even specify new required fields *at all*, or whether we can get away with just specifying defaults.
module.exports = {
// Note that this mapping can be used to determine the losslessness of both forward and backward migrations!
types: {
@ -47,7 +50,7 @@ module.exports = {
validForTypes: new Set([ "decimal" ]),
isLossless: (oldSetting, newSetting) => (newSetting > oldSetting),
losslessTransformer: (value) => value, // No change to value
requiresMigrationDefault: false
			requiresMigrationDefault: false // Presumably: whether changing this option forces the migration to specify a 'migration default' for existing rows. TODO: confirm the original intent before relying on this flag.
},
signed: {
validForTypes: new Set([ "decimal", "integer" ]),
@ -67,8 +70,13 @@ module.exports = {
required: {
// Valid for all types
validForTypes: true,
isLossless: true,
isLossless: () => true,
requiresMigrationDefault: true
},
defaultValue: { // For newly inserted rows, not necessarily for migrated rows! See notes.txt
validForTypes: true,
isLossless: () => true,
requiresMigrationDefault: false
}
},
operations: {

7
src/util/compose.js

@ -2,6 +2,11 @@
module.exports = function compose(funcs) {
return function (value) {
return funcs.reduce((last, func) => func(last), value);
if (funcs.length > 0) {
return funcs.reduce((last, func) => func(last), value);
} else {
// Identity function
return (value) => value;
}
};
};

948
yarn.lock

File diff suppressed because it is too large
Loading…
Cancel
Save