Browse Source

Hopefully last rewrite of migration evaluator

master
Sven Slootweg 3 months ago
parent
commit
d95d8dc1a9
  1. 22
      src/packages/tree-to-operations/index.js
  2. 21
      src/schema/compute-transform.js
  3. 176
      src/schema/evaluator/index.js
  4. 68
      src/schema/evaluator/operation-tree-rules.js
  5. 0
      src/schema/evaluator/schema-rules.js
  6. 89
      src/schema/evaluator/transform-computer.js
  7. 400
      src/schema/reducer.js
  8. 12
      yarn.lock

22
src/packages/tree-to-operations/index.js

@@ -0,0 +1,22 @@
"use strict";
const assert = require("assert");
const assureArray = require("assure-array");
// Recursively walks a nested `operations` tree, dispatching each operation to the
// handler registered for its `type` in `structure`. A handler is either a function
// (returning one or more flat operation records) or a nested structure to recurse
// into; arrays of handlers are supported and each entry is applied in turn.
// `path` is the list of ancestor operations, passed to handlers for context.
function doConversionForLevel(structure, operations, path) {
	return operations.flatMap((operation) => {
		assert(operation.type != null, `Encountered an operation without a 'type' property`);
		let handlers = structure[operation.type];
		// FIXME: Better error reporting when this occurs, this is a bug
		assert(handlers != null, `No handler registered for operation type '${operation.type}' (ancestor path: ${JSON.stringify(path.map((ancestor) => ancestor.type))})`);
		return assureArray(handlers).flatMap((handler) => {
			return (typeof handler === "function")
				? handler(operation, path)
				: doConversionForLevel(handler, operation.operations, path.concat([ operation ]));
		});
	});
}
module.exports = function treeToOperations(structure, operations) {
return doConversionForLevel(structure, operations, []);
};

21
src/schema/compute-transform.js

@@ -1,21 +0,0 @@
"use strict";
const rules = require("./rules");
module.exports = {
type: function (oldType, newType) {
return {
forward: rules.types[oldType].losslessConversionTo[newType],
backward: rules.types[newType].losslessConversionTo[oldType],
};
},
attribute: function (attribute, oldValue, newValue) {
let canTransformForward = rules.attributes[attribute].isLossless(oldValue, newValue);
let canTransformBackward = rules.attributes[attribute].isLossless(newValue, oldValue);
return {
forward: (canTransformForward) ? rules.attributes[attribute].losslessTransformer : undefined,
backward: (canTransformBackward) ? rules.attributes[attribute].losslessTransformer : undefined,
};
}
};

176
src/schema/evaluator/index.js

@@ -0,0 +1,176 @@
/* eslint-disable no-loop-func */
"use strict";
const assert = require("assert");
const matchValue = require("match-value");
const immutableDeepMerge = require("../../packages/immutable-deep-merge");
const treeToOperations = require("../../packages/tree-to-operations");
const schemaRules = require("./schema-rules");
const operationTreeRules = require("./operation-tree-rules");
const createTransformComputer = require("./transform-computer");
// FIXME: table/row terminology etc.
// FIXME: replace asserts with proper checks and error messages
// TODO: We're no longer going to use merge-by-template, but we still need to deal with the issue where function literals are treated like lazy values in deep immutable merges
// TODO: Add API for "set this object literally, no merge" -- is that actually necessary, can't we just have a transform function that only returns the new value directly?
// FIXME: Find a way to support arrays? Particularly objects *within* arrays, which would also need to be merged recursively...
// FIXME: requiresMigrationDefault and requiredAttributes validation, as well as verifying that a type has actually been set by the end of a createField operation
// FIXME: Track exactly which data would be lost upon a rollback so that the user can be asked for confirmation first
// FIXME: Throw an error if a non-required transformTo is specified without a corresponding rollbackTo
// Produces a new field schema with `type` replaced by `newType`, carrying over only
// those attributes that are still valid for the new type (per schemaRules). Throws
// when the type is unchanged, since that would be a no-op migration step.
// NOTE: Uses strict inequality; the previous loose `!=` treated an unset (undefined)
// type and a null newType as "already the type", which misreported that edge case.
function changeType(schema, newType) {
	if (schema.type !== newType) {
		let newSchema = { type: newType };
		for (let attribute of Object.keys(schema)) {
			if (attribute === "type" || !schemaRules.attributes[attribute].validForTypes.has(newType)) {
				// Drop attributes that don't apply to the new type (and the old `type` itself)
				continue;
			} else {
				newSchema[attribute] = schema[attribute];
			}
		}
		return newSchema;
	} else {
		throw new Error(`Tried to set field type to '${newType}', but that is already the type`);
	}
}
// FIXME: Track destructive forward migrations *and* rollbacks, and outside of dev mode require the user to confirm with eg. an --allow-destructive-rollbacks that this is okay to apply
// Applies a list of field-level operations (setFieldType, setAttribute, transformTo,
// rollbackTo, forbidRollback) to `currentField`, producing the field's new evaluated
// state. `currentField` is empty for createField and the prior state for modifyField.
// Returns { schema, forwardTransforms, backwardTransforms, rollbackForbidden }.
function applyFieldOperations(fieldName, currentField = {}, operations) {
	let schema = currentField.schema ?? {};
	let lastForwardTransforms = currentField.forwardTransforms ?? [];
	let lastBackwardTransforms = currentField.backwardTransforms ?? [];
	let rollbackForbidden = currentField.rollbackForbidden ?? false; // FIXME: Actually use this as a safeguard in migrations
	// One computer per direction; rollback registrations simply swap old/new arguments
	let forwardTransformComputer = createTransformComputer();
	let backwardTransformComputer = createTransformComputer();
	let explicitForwardTransform = null;
	let explicitBackwardTransform = null;
	for (let operation of operations) {
		matchValue(operation.type, {
			setFieldType: () => {
				// Register the type change in both directions *before* mutating the schema,
				// since the computers need the pre-change type
				forwardTransformComputer.changeType(schema.type, operation.fieldType);
				backwardTransformComputer.changeType(operation.fieldType, schema.type);
				// NOTE: The logic for this is separated out into its own function because a bunch of complexity is needed for determining which attributes can be kept
				schema = changeType(schema, operation.fieldType);
			},
			setAttribute: () => {
				if (schema[operation.attribute] !== operation.value) {
					let currentValue = schema[operation.attribute];
					forwardTransformComputer.changeAttribute(operation.attribute, currentValue, operation.value);
					backwardTransformComputer.changeAttribute(operation.attribute, operation.value, currentValue);
					schema = { ... schema, [operation.attribute]: operation.value };
				} else {
					// FIXME: Error quality
					throw new Error(`Tried to change '${operation.attribute}' attribute to '${operation.value}', but it's already set to that`);
				}
			},
			transformTo: () => {
				// At most one explicit forward transformer per field per migration
				if (explicitForwardTransform == null) {
					explicitForwardTransform = operation.transformer;
				} else {
					// FIXME: Error quality
					throw new Error(`You can only specify one transformTo per field per migration`);
				}
			},
			rollbackTo: () => {
				// At most one explicit rollback transformer per field per migration
				if (explicitBackwardTransform == null) {
					explicitBackwardTransform = operation.transformer;
				} else {
					// FIXME: Error quality
					throw new Error(`You can only specify one rollbackTo per field per migration`);
				}
			},
			forbidRollback: () => {
				rollbackForbidden = true;
			}
			// TODO: rest of operations
		});
	}
	// Resolve accumulated changes into concrete transform functions; getResults throws
	// when a required manual transformer is missing, or a superfluous one was supplied
	let forwardTransform = forwardTransformComputer.getResults(explicitForwardTransform, true, fieldName);
	let backwardTransform = backwardTransformComputer.getResults(explicitBackwardTransform, false, fieldName);
	return {
		schema: schema,
		forwardTransforms: lastForwardTransforms.concat([ forwardTransform ]),
		backwardTransforms: lastBackwardTransforms.concat([ backwardTransform ]),
		rollbackForbidden: rollbackForbidden,
	};
}
// Evaluates a (sub)list of migrations into a single schema state, by flattening the
// migration tree into operations and folding each one into an immutably-merged
// accumulator starting from `initial`.
module.exports = function evaluateMigrations(migrationList, initial = {}) {
	/* NOTE: This function isn't always called with the *full* set of migrations from the start; it can also be called with just a subset of them, to determine the exact transformations from one given schema revision to another.
	De facto, the evaluated schema resulting from this function is divided into two parts:
	1. The absolute schema; this defines what the final total schema looks like, and must always consider *all* migrations from the very start, even when only evaluating a subset.
	2. The schema delta; this describes the (data) mutations introduced by this specific subset of migrations, and *not* those of previous migrations. This still requires the previous absolute schema to function, otherwise it would not be possible to determine the data mutations caused by the first migration in the evaluated set.
	In practice, this means that we initialize the merge operation with a copy of the schema as it existed prior to this evaluated set - but *only* the absolute component of that schema, not the schema delta. Then after evaluation, the schema delta will only contain those transformations introduced by this subset.
	// FIXME: Actually do the initializing stuff described here, lol
	*/
	// Flatten the nested migration tree into flat operation records first
	let operations = treeToOperations(operationTreeRules, migrationList);
	return operations.reduce((last, operation) => {
		return matchValue(operation.type, {
			// A created collection must not exist yet; it starts out empty
			createCollection: () => immutableDeepMerge(last, {
				collections: {
					[operation.name]: (collection) => {
						assert(collection === undefined); // FIXME: Produce a useful error here
						return {};
					}
				}
			}),
			deleteCollection: () => {
				throw new Error(`Not implemented yet`);
			},
			// A created field must not exist yet; evaluated from an empty field state
			createField: () => immutableDeepMerge(last, {
				collections: {
					[operation.collection]: {
						fields: {
							[operation.name]: (field) => {
								assert(field === undefined); // FIXME: Produce a useful error here
								return applyFieldOperations(operation.name, {}, operation.operations);
							}
						}
					}
				}
			}),
			// A modified field must already exist; evaluated from its previous state
			modifyField: () => immutableDeepMerge(last, {
				collections: {
					[operation.collection]: {
						fields: {
							[operation.name]: (field) => {
								assert(field !== undefined); // FIXME: Produce a useful error here
								return applyFieldOperations(operation.name, field, operation.operations);
							}
						}
					}
				}
			}),
			deleteField: () => {
				throw new Error(`Not implemented yet`);
			},
			createIndex: () => {
				throw new Error(`Not implemented yet`);
			},
			deleteIndex: () => {
				throw new Error(`Not implemented yet`);
			}
		});
	}, initial);
	/* MARKER: Turn the resulting migration tree into 3 different views:
	- final schema form, for encoding new entries
	- structural changes, eg. newly created collections that need to be registered in some way before records are even generated for them
	- record transformations, ie. a definition of how records in older schema versions need to have their values/structure transformed to conform to the new schema
	Also think about how to deal with index roll-over, eg. a new runtime-generated index because of changing tzdb data
	*/
};

68
src/schema/evaluator/operation-tree-rules.js

@@ -0,0 +1,68 @@
"use strict";
// Handler structure consumed by tree-to-operations: each key maps a node type to
// either a handler function (producing flat operation records), a nested structure
// to recurse into for the node's child `operations`, or an array combining both
// (the node produces its own record *and* its children are recursed into).
module.exports = {
	migration: {
		createCollection: [ operationCreateCollection, {
			createField: operationCreateField,
			createIndex: operationCreateIndex
		}],
		modifyCollection: {
			createField: operationCreateField,
			modifyField: operationModifyField,
			deleteField: operationDeleteField,
			createIndex: operationCreateIndex,
			deleteIndex: operationDeleteIndex
		},
		deleteCollection: operationDeleteCollection
	}
};
// Normalizes a `createCollection` tree node into a flat operation record.
function operationCreateCollection(item) {
	let { name } = item;
	return { type: "createCollection", name };
}
// Normalizes a `deleteCollection` tree node into a flat operation record.
// TODO: Actually make it delete the data? Maybe have an option in dev to retain 'deleted' collections? Need to figure out security/privacy implications of that, and whether it's even useful to begin with
function operationDeleteCollection(item) {
	let { name } = item;
	return { type: "deleteCollection", name };
}
// Normalizes a `createField` tree node; the owning collection is the nearest
// ancestor node in the operation path.
function operationCreateField(item, path) {
	let parentCollection = path.at(-1);
	return {
		type: "createField",
		collection: parentCollection.name,
		name: item.name,
		operations: item.operations
	};
}
// Normalizes a `modifyField` tree node; the owning collection is the nearest
// ancestor node in the operation path.
function operationModifyField(item, path) {
	let parentCollection = path.at(-1);
	return {
		type: "modifyField",
		collection: parentCollection.name,
		name: item.name,
		operations: item.operations
	};
}
// Normalizes a `deleteField` tree node; the owning collection is the nearest
// ancestor node in the operation path. Child operations do not apply to deletion.
function operationDeleteField(item, path) {
	let parentCollection = path.at(-1);
	return {
		type: "deleteField",
		collection: parentCollection.name,
		name: item.name
	};
}
// Placeholder: index creation is not implemented yet.
function operationCreateIndex(_item, _path) {
	let error = new Error(`Not implemented yet`);
	throw error;
}
// Placeholder: index deletion is not implemented yet.
function operationDeleteIndex(_item, _path) {
	let error = new Error(`Not implemented yet`);
	throw error;
}

0
src/schema/rules.js → src/schema/evaluator/schema-rules.js

89
src/schema/evaluator/transform-computer.js

@@ -0,0 +1,89 @@
"use strict";
const util = require("util");
const unreachable = require("@joepie91/unreachable")("zapdb");
const compose = require("../../util/compose");
const schemaRules = require("./schema-rules");
module.exports = function createTransformComputer() {
let automaticTransformers = [];
let requiredTransformers = [];
return {
changeType: function (oldType, newType) {
if (oldType == null || newType == null) {
// We're setting a type for this field for the first time (which should only happen during field creation). This also applies to the inverse computation for rollbacks
return;
} else {
let automatic = schemaRules.types[oldType].losslessConversionTo[newType];
if (automatic != null) {
automaticTransformers.push(automatic);
} else {
requiredTransformers.push({ type: "type", oldType, newType });
}
}
},
changeAttribute: function (attribute, oldValue, newValue) {
if (oldValue == null || newValue == null) {
// We're setting this attribute on this field for the first time (which should only happen during field creation). This also applies to inverse computation for rollbacks.
// NOTE: Even if a field is not required, it should always be initialized during field creation, using an implicit operation setting the default, so a legitimate revert to `undefined` should never be possible.
// FIXME: How to deal with this when a new attribute is introduced in a new schema DSL version? Should we just pretend that the old one always existed, by updating the old DSL implementation to insert it implicitly as a default? Probably should.
return;
} else {
let canBeAutomatic = schemaRules.attributes[attribute].isLossless(oldValue, newValue);
if (canBeAutomatic) {
automaticTransformers.push(schemaRules.attributes[attribute].losslessTransformer);
} else {
requiredTransformers.push({ type: "attribute", attribute, oldValue, newValue });
}
}
},
getResults: function (manualTransformer, isForward, fieldName) {
// NOTE: We disallow transformTo/rollbackTo when they are not required; if the user wishes to bulk-transform values, they should specify a changeRecords operation instead (which should probably require the user to specify both the forward and backward transform?). Otherwise, we cannot implement "maybe you forgot a rollbackTo" errors, because that's only actually an error when a transform isn't *required*, and so if a user 'overreaches' in their type transform to also do a value transform we can't detect missing corresponding rollbackTo logic.
// NOTE: There are deliberately duplicate conditional clauses in here to improve readability!
if (requiredTransformers.length === 0 && automaticTransformers.length === 0) {
if (manualTransformer == null) {
// Identity function; no changes were made that affect the value itself
return (value) => value;
} else {
// FIXME: Better error message
let operationName = (isForward) ? "transformTo" : "rollbackTo";
throw new Error(`A ${operationName} operation was specified, but no other schema changes require one. Maybe you meant to use updateRecords instead?`);
}
} else if (requiredTransformers.length === 0 && automaticTransformers.length > 0) {
return compose(automaticTransformers);
} else if (requiredTransformers.length > 0) {
if (manualTransformer == null) {
// FIXME: Have some sort of error templating abstracted out instead
let causes = requiredTransformers
.map((cause) => {
if (cause.type === "type") {
return ` - Field type changed from '${cause.oldType}' to '${cause.newType}'`;
} else if (cause.type === "attribute") {
let from = (isForward) ? cause.oldValue : cause.newValue;
let to = (isForward) ? cause.newValue : cause.oldValue;
return ` - Attribute '${cause.attribute}' changed from '${util.inspect(from)}' to '${util.inspect(to)}'`;
} else {
throw unreachable("Unrecognized cause type");
}
})
.join("\n");
let errorMessage = (isForward)
? `One or more schema changes for '${fieldName}' cannot be applied automatically, because existing data would lose precision. You need to specify a transformTo operation manually.`
: `One or more schema changes for '${fieldName}' cannot be applied automatically, because rolling back the migration would cause data to lose precision. You need to specify a rollbackTo operation manually.`
// FIXME: Better error message
throw new Error(`${errorMessage}\n\nCaused by:\n${causes}`);
} else {
// TODO: Is this the correct order, always letting the manual transformer act on the original value rather than the one post automatic transforms? Probably is, if we want automatic transforms to be transparent to the user (and not produce a leaky abstraction)
return compose([ manualTransformer, ... automaticTransformers ]);
}
} else {
throw unreachable("Impossible condition");
}
}
};
};

400
src/schema/reducer.js

@@ -1,400 +0,0 @@
/* eslint-disable no-loop-func */
"use strict";
const assert = require("assert");
const matchValue = require("match-value");
const splitFilterN = require("split-filter-n");
const unreachable = require("@joepie91/unreachable")("zapdb");
const mergeByTemplate = require("merge-by-template");
const immutableDeepMerge = require("../packages/immutable-deep-merge");
const util = require("util");
const rules = require("./rules");
const computeTransform = require("./compute-transform");
const compose = require("../util/compose");
// FIXME: table/row terminology etc.
// FIXME: replace asserts with proper checks and error messages
// TODO: Find a way to roll this into merge-by-template somehow? The main difference is specifying dynamic transforms at rule definition time (and needing to use meta-objects in the mergeable) vs. specifying dynamic transforms at merge time directly
// TODO: Add API for "set this object literally, no merge" -- is that actually necessary, can't we just have a transform function that only returns the new value directly?
// FIXME: Find a way to support arrays? Particularly objects *within* arrays, which would also need to be merged recursively...
// FIXME: requiresMigrationDefault and requiredAttributes validation, as well as verifying that a type has actually been set by the end of a createField operation
// FIXME: Track exactly which data would be lost upon a rollback so that the user can be asked for confirmation first
// Creates a stateful accumulator that tracks which value transformations a sequence
// of schema changes to a single field implies: lossless changes collect automatic
// transformers, lossy changes are recorded as requiring a manual transformer,
// which `getResults` then checks for.
function createTransformComputer() {
	let automaticTransformers = [];
	let requiredTransformers = [];
	return {
		// Registers a type change; for rollbacks the caller swaps the arguments
		changeType: function (oldType, newType) {
			if (oldType == null || newType == null) {
				// We're setting a type for this field for the first time (which should only happen during field creation). This also applies to the inverse computation for rollbacks
				return;
			} else {
				let automatic = rules.types[oldType].losslessConversionTo[newType];
				if (automatic != null) {
					automaticTransformers.push(automatic);
				} else {
					requiredTransformers.push({ type: "type", oldType, newType });
				}
			}
		},
		// Registers a change of a single attribute's value on the field
		changeAttribute: function (attribute, oldValue, newValue) {
			if (oldValue == null || newValue == null) {
				// We're setting this attribute on this field for the first time (which should only happen during field creation). This also applies to inverse computation for rollbacks.
				// NOTE: Even if a field is not required, it should always be initialized during field creation, using an implicit operation setting the default, so a legitimate revert to `undefined` should never be possible.
				// FIXME: How to deal with this when a new attribute is introduced in a new schema DSL version? Should we just pretend that the old one always existed, by updating the old DSL implementation to insert it implicitly as a default? Probably should.
				return;
			} else {
				let canBeAutomatic = rules.attributes[attribute].isLossless(oldValue, newValue);
				if (canBeAutomatic) {
					automaticTransformers.push(rules.attributes[attribute].losslessTransformer);
				} else {
					requiredTransformers.push({ type: "attribute", attribute, oldValue, newValue });
				}
			}
		},
		// Produces the final transform function for this direction, or throws when a
		// manual transformer is required but missing (or supplied but unnecessary)
		getResults: function (manualTransformer, isForward, fieldName) {
			// NOTE: We disallow transformTo/rollbackTo when they are not required; if the user wishes to bulk-transform values, they should specify a changeRecords operation instead (which should probably require the user to specify both the forward and backward transform?). Otherwise, we cannot implement "maybe you forgot a rollbackTo" errors, because that's only actually an error when a transform isn't *required*, and so if a user 'overreaches' in their type transform to also do a value transform we can't detect missing corresponding rollbackTo logic.
			// NOTE: There are deliberately duplicate conditional clauses in here to improve readability!
			if (requiredTransformers.length === 0 && automaticTransformers.length === 0) {
				if (manualTransformer == null) {
					// Identity function; no changes were made that affect the value itself
					return (value) => value;
				} else {
					// FIXME: Better error message
					let operationName = (isForward) ? "transformTo" : "rollbackTo";
					throw new Error(`A ${operationName} operation was specified, but no other schema changes require one. Maybe you meant to use updateRecords instead?`);
				}
			} else if (requiredTransformers.length === 0 && automaticTransformers.length > 0) {
				return compose(automaticTransformers);
			} else if (requiredTransformers.length > 0) {
				if (manualTransformer == null) {
					// FIXME: Have some sort of error templating abstracted out instead
					let causes = requiredTransformers
						.map((cause) => {
							if (cause.type === "type") {
								return ` - Field type changed from '${cause.oldType}' to '${cause.newType}'`;
							} else if (cause.type === "attribute") {
								let from = (isForward) ? cause.oldValue : cause.newValue;
								let to = (isForward) ? cause.newValue : cause.oldValue;
								return ` - Attribute '${cause.attribute}' changed from '${util.inspect(from)}' to '${util.inspect(to)}'`;
							} else {
								throw unreachable("Unrecognized cause type");
							}
						})
						.join("\n");
					let errorMessage = (isForward)
						? `One or more schema changes for '${fieldName}' cannot be applied automatically, because existing data would lose precision. You need to specify a transformTo operation manually.`
						: `One or more schema changes for '${fieldName}' cannot be applied automatically, because rolling back the migration would cause data to lose precision. You need to specify a rollbackTo operation manually.`
					// FIXME: Better error message
					throw new Error(`${errorMessage}\n\nCaused by:\n${causes}`);
				} else {
					// TODO: Is this the correct order, always letting the manual transformer act on the original value rather than the one post automatic transforms? Probably is, if we want automatic transforms to be transparent to the user (and not produce a leaky abstraction)
					return compose([ manualTransformer, ... automaticTransformers ]);
				}
			} else {
				throw unreachable("Impossible condition");
			}
		}
	};
}
// FIXME: Throw an error if a non-required transformTo is specified without a corresponding rollbackTo
// Produces a new field schema with `type` replaced by `newType`, carrying over
// only those attributes that remain valid for the new type (per the rules).
// Throws when the type is unchanged, as that would be a no-op migration step.
function changeType(schema, newType) {
	if (schema.type == newType) {
		throw new Error(`Tried to set field type to '${newType}', but that is already the type`);
	}
	let newSchema = { type: newType };
	for (let attribute of Object.keys(schema)) {
		// The old `type` entry is never copied; other attributes only if still applicable
		let keepable = (attribute !== "type" && rules.attributes[attribute].validForTypes.has(newType));
		if (keepable) {
			newSchema[attribute] = schema[attribute];
		}
	}
	return newSchema;
}
// FIXME: Track destructive forward migrations *and* rollbacks, and outside of dev mode require the user to confirm with eg. an --allow-destructive-rollbacks that this is okay to apply
// Applies a list of field-level operations (setFieldType, setAttribute, transformTo,
// rollbackTo, forbidRollback) to `currentField`, producing the field's new evaluated
// state. `currentField` is empty for createField and the prior state for modifyField.
function applyFieldOperations(fieldName, currentField = {}, operations) {
	// { schema, forwardTransform, backwardTransform, transformsRequired, rollbackForbidden }
	let schema = currentField.schema ?? {};
	let lastForwardTransforms = currentField.forwardTransforms ?? [];
	let lastBackwardTransforms = currentField.backwardTransforms ?? [];
	let rollbackForbidden = currentField.rollbackForbidden ?? false; // FIXME: Actually use this as a safeguard in migrations
	// One computer per direction; rollback registrations swap old/new arguments
	let forwardTransformComputer = createTransformComputer();
	let backwardTransformComputer = createTransformComputer();
	let explicitForwardTransform = null;
	let explicitBackwardTransform = null;
	for (let operation of operations) {
		matchValue(operation.type, {
			setFieldType: () => {
				// Register the type change in both directions before mutating the schema
				forwardTransformComputer.changeType(schema.type, operation.fieldType);
				backwardTransformComputer.changeType(operation.fieldType, schema.type);
				// NOTE: The logic for this is separated out into its own function because a bunch of complexity is needed for determining which attributes can be kept
				schema = changeType(schema, operation.fieldType);
			},
			setAttribute: () => {
				if (schema[operation.attribute] !== operation.value) {
					let currentValue = schema[operation.attribute];
					forwardTransformComputer.changeAttribute(operation.attribute, currentValue, operation.value);
					backwardTransformComputer.changeAttribute(operation.attribute, operation.value, currentValue);
					schema = { ... schema, [operation.attribute]: operation.value };
				} else {
					// FIXME: Error quality
					throw new Error(`Tried to change '${operation.attribute}' attribute to '${operation.value}', but it's already set to that`);
				}
			},
			transformTo: () => {
				// At most one explicit forward transformer per field per migration
				if (explicitForwardTransform == null) {
					explicitForwardTransform = operation.transformer;
				} else {
					// FIXME: Error quality
					throw new Error(`You can only specify one transformTo per field per migration`);
				}
			},
			rollbackTo: () => {
				// At most one explicit rollback transformer per field per migration
				if (explicitBackwardTransform == null) {
					explicitBackwardTransform = operation.transformer;
				} else {
					// FIXME: Error quality
					throw new Error(`You can only specify one rollbackTo per field per migration`);
				}
			},
			forbidRollback: () => {
				rollbackForbidden = true;
			}
			// TODO: rest of operations
		});
	}
	// Resolve accumulated changes into concrete transform functions; throws when a
	// required manual transformer is missing or a superfluous one was supplied
	let forwardTransform = forwardTransformComputer.getResults(explicitForwardTransform, true, fieldName);
	let backwardTransform = backwardTransformComputer.getResults(explicitBackwardTransform, false, fieldName);
	return {
		schema: schema,
		forwardTransforms: lastForwardTransforms.concat([ forwardTransform ]),
		backwardTransforms: lastBackwardTransforms.concat([ backwardTransform ]),
		rollbackForbidden: rollbackForbidden,
	};
}
// Reducer applying a single collection-scoped operation (createField, modifyField,
// addIndex) to a collection's evaluated state, returning the immutably-merged result.
function collectionOperationReducer(collection, operation) {
	return matchValue(operation.type, {
		// A created field must not exist yet; evaluated from an empty field state
		createField: () => immutableDeepMerge(collection, {
			fields: {
				[operation.name]: (field) => {
					assert(field === undefined); // FIXME: Produce a useful error here
					return applyFieldOperations(operation.name, {}, operation.operations);
				}
			}
		}),
		// A modified field must already exist; evaluated from its previous state
		modifyField: () => immutableDeepMerge(collection, {
			fields: {
				[operation.name]: (field) => {
					assert(field !== undefined); // FIXME: Produce a useful error here
					return applyFieldOperations(operation.name, field, operation.operations);
				}
			}
		}),
		addIndex: () => immutableDeepMerge(collection, {
			indexes: {
				[operation.name]: operation.definition
			}
		})
	});
}
// Reducer applying a single schema-level operation (createCollection,
// modifyCollection, deleteCollection) to the evaluated schema, delegating
// collection-scoped child operations to collectionOperationReducer.
function schemaOperationReducer(schema, operation) {
	return matchValue(operation.type, {
		// A created collection must not exist yet; children evaluated from an empty state
		createCollection: () => immutableDeepMerge(schema, {
			collections: {
				[operation.name]: (collection) => {
					assert(collection === undefined); // FIXME: Produce a useful error here
					return operation.operations.reduce(collectionOperationReducer, {});
				}
			}
		}),
		// A modified collection must already exist; children evaluated from its prior state
		modifyCollection: () => immutableDeepMerge(schema, {
			collections: {
				[operation.name]: (collection) => {
					assert(collection !== undefined); // FIXME: Produce a useful error here
					return operation.operations.reduce(collectionOperationReducer, collection);
				}
			}
		}),
		deleteCollection: () => {
			throw new Error(`Not implemented yet`);
		}
	});
}
// Evaluates a list of migrations into a single schema state by folding every
// operation of every migration through schemaOperationReducer, starting from `initial`.
module.exports = function reduceMigrations(migrationList, initial = {}) {
	return migrationList.reduce((lastSchema, migration) => {
		return migration.operations.reduce(schemaOperationReducer, lastSchema);
	}, initial);
	/* MARKER: Turn the resulting migration tree into 3 different views:
	- final schema form, for encoding new entries
	- structural changes, eg. newly created collections that need to be registered in some way before records are even generated for them
	- record transformations, ie. a definition of how records in older schema versions need to have their values/structure transformed to conform to the new schema
	Also think about how to deal with index roll-over, eg. a new runtime-generated index because of changing tzdb data
	*/
};
// Unfinished experiment: a merge-by-template based merger for evaluated migration
// results. NOTE(review): the innermost merge function has an empty body, so every
// merged field would currently come out as `undefined` — never completed.
let mergeMigrations = mergeByTemplate.createMerger({
	collections: mergeByTemplate.anyProperty({
		fields: mergeByTemplate.anyProperty((a, b) => {
		})
	})
});
/*
{
collections: {
users: {
fields: {
username: {
schema: { type: 'string', required: true },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
},
passwordHash: {
schema: { type: 'string', required: true },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
},
emailAddress: {
schema: { type: 'string', required: true },
forwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
rollbackForbidden: false
},
isActive: {
schema: { type: 'boolean', required: true },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
},
registrationDate: {
schema: { type: 'date', required: true, withTimezone: true },
forwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
rollbackForbidden: false
},
invitesLeft: {
schema: { type: 'integer', required: true, signed: false },
forwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)], [Function (anonymous)] ],
rollbackForbidden: false
},
sendNewsletter: {
schema: { type: 'boolean', required: true, defaultValue: false },
forwardTransforms: [ [Function (anonymous)] ],
backwardTransforms: [ [Function (anonymous)] ],
rollbackForbidden: false
}
}
}
}
}
*/
// let dummyMigrations = [
// { id: 1, operations: [
// { type: "createCollection", name: "users", operations: [
// { type: "createField", name: "username", fieldType: "string", required: true },
// { type: "createField", name: "passwordHash", fieldType: "string", required: true },
// { type: "createField", name: "emailAddress", fieldType: "string", required: false },
// { type: "createField", name: "isActive", fieldType: "boolean", required: true },
// { type: "createField", name: "registrationDate", fieldType: "date", required: true, withTimezone: false },
// { type: "createField", name: "invitesLeft", fieldType: "integer", required: true },
// ]}
// ]},
// { id: 2, operations: [
// { type: "modifyCollection", name: "users", operations: [
// { type: "setFieldAttributes", name: "emailAddress", required: false },
// { type: "setFieldAttributes", name: "isActive", required: true },
// { type: "setFieldAttributes", name: "registrationDate", withTimezone: true },
// { type: "setFieldAttributes", name: "invitesLeft", signed: false },
// ]}
// ]},
// ];
// Example migration list used for manual testing: migration 1 creates a `users`
// collection with its initial fields; migration 2 changes attributes, supplies a
// rollback transformer, and adds a new field with a (lazy) default value. The
// result of reducing it is dumped to the console below.
let dummyMigrations = [
	{ id: 1, operations: [
		{ type: "createCollection", name: "users", operations: [
			{ type: "createField", name: "username", operations: [
				{ type: "setFieldType", fieldType: "string" },
				{ type: "setAttribute", attribute: "required", value: true }
			]},
			{ type: "createField", name: "passwordHash", operations: [
				{ type: "setFieldType", fieldType: "string" },
				{ type: "setAttribute", attribute: "required", value: true }
			]},
			{ type: "createField", name: "emailAddress", operations: [
				{ type: "setFieldType", fieldType: "string" },
				{ type: "setAttribute", attribute: "required", value: false }
			]},
			{ type: "createField", name: "isActive", operations: [
				{ type: "setFieldType", fieldType: "boolean" },
				{ type: "setAttribute", attribute: "required", value: true }
			]},
			{ type: "createField", name: "registrationDate", operations: [
				{ type: "setFieldType", fieldType: "date" },
				{ type: "setAttribute", attribute: "required", value: true },
				{ type: "setAttribute", attribute: "withTimezone", value: false },
			]},
			{ type: "createField", name: "invitesLeft", operations: [
				{ type: "setFieldType", fieldType: "integer" },
				{ type: "setAttribute", attribute: "required", value: true },
			]},
		]}
	]},
	{ id: 2, operations: [
		{ type: "modifyCollection", name: "users", operations: [
			{ type: "modifyField", name: "emailAddress", operations: [
				{ type: "setAttribute", attribute: "required", value: true },
			]},
			// FIXME: Disallow no-ops for attribute changes?
			// { type: "modifyField", name: "isActive", operations: [
			// { type: "setAttribute", attribute: "required", value: true },
			// ]},
			{ type: "modifyField", name: "registrationDate", operations: [
				{ type: "setAttribute", attribute: "withTimezone", value: true },
				{ type: "rollbackTo", transformer: (value) => value.toUTC() }
			]},
			{ type: "modifyField", name: "invitesLeft", operations: [
				{ type: "setAttribute", attribute: "signed", value: false },
			]},
			{ type: "createField", name: "sendNewsletter", operations: [
				{ type: "setFieldType", fieldType: "boolean" },
				{ type: "setAttribute", attribute: "required", value: true }, // FIXME: Enforce a default in this case! Otherwise existing columns would be invalid -- actually this should be handled by 'migration defaults' specifically, without requiring a default for new records
				// FIXME: The below lazy function is currently getting evaluated at schema reduce time, because of the immutable deep merge. *Really* need to work this into merge-by-template instead to prevent cases like this!
				{ type: "setAttribute", attribute: "defaultValue", value: () => false }, // FIXME: Always specified as a value-producing function, or also allow literals?
			]},
		]}
	]},
];
// Dump the fully-evaluated schema (module.exports is reduceMigrations here)
console.dir(module.exports(dummyMigrations), { depth: null });

12
yarn.lock

@@ -1006,7 +1006,7 @@ assert-plus@1.0.0, assert-plus@^1.0.0:
assure-array@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/assure-array/-/assure-array-1.0.0.tgz#4f4ad16a87659d6200a4fb7103462033d216ec1f"
integrity sha1-T0rRaodlnWIApPtxA0YgM9IW7B8=
integrity sha512-igvOvGYidAcJKr6YQIHzLivUpAdqUfi7MN0QfrEnFtifQvuw6D0W4oInrIVgTaefJ+QBVWAj8ZYuUGNnwq6Ydw==
astral-regex@^2.0.0:
version "2.0.0"
@@ -3033,16 +3033,16 @@ make-dir@^3.0.0, make-dir@^3.0.2:
dependencies:
semver "^6.0.0"
map-obj@4:
version "4.3.0"
resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a"
integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==
map-obj@^1.0.0, map-obj@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d"
integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=
map-obj@^4:
version "4.3.0"
resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a"
integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==
match-value@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/match-value/-/match-value-1.1.0.tgz#ad311ef8bbe2d344a53ec3104e28fe221984b98e"

Loading…
Cancel
Save