/* eslint-disable no-loop-func */
"use strict";
const assert = require("assert");
const matchValue = require("match-value");
const immutableDeepMerge = require("../../packages/immutable-deep-merge");
const treeToOperations = require("../../packages/tree-to-operations");
const schemaRules = require("./schema-rules");
const operationTreeRules = require("./operation-tree-rules");
const createTransformComputer = require("./transform-computer");
// FIXME: table/row terminology etc.
// FIXME: replace asserts with proper checks and error messages
// TODO: We're no longer going to use merge-by-template, but we still need to deal with the issue where function literals are treated like lazy values in deep immutable merges
// TODO: Add API for "set this object literally, no merge" -- is that actually necessary, can't we just have a transform function that only returns the new value directly?
// FIXME: Find a way to support arrays? Particularly objects *within* arrays, which would also need to be merged recursively...
// FIXME: requiresMigrationDefault and requiredAttributes validation, as well as verifying that a type has actually been set by the end of a createField operation
// FIXME: Track exactly which data would be lost upon a rollback so that the user can be asked for confirmation first
// FIXME: Throw an error if a non-required transformTo is specified without a corresponding rollbackTo
/**
 * Produces a new schema object with its type switched to `newType`, carrying
 * over only those existing attributes that remain valid for the new type
 * (according to `schemaRules.attributes[*].validForTypes`).
 *
 * @param {Object} schema - The current field schema; must have a `type` key.
 * @param {*} newType - The field type to switch to.
 * @returns {Object} A fresh schema object; `schema` is never mutated.
 * @throws {Error} If the field already has the requested type.
 */
function changeType(schema, newType) {
	// Strict comparison; the rest of the file consistently uses ===/!==
	if (schema.type === newType) {
		throw new Error(`Tried to set field type to '${newType}', but that is already the type`);
	}
	let newSchema = { type: newType };
	for (let attribute of Object.keys(schema)) {
		// Drop the old `type` entry and any attribute that does not apply to the new type
		if (attribute !== "type" && schemaRules.attributes[attribute].validForTypes.has(newType)) {
			newSchema[attribute] = schema[attribute];
		}
	}
	return newSchema;
}
// FIXME: Track destructive forward migrations *and* rollbacks, and outside of dev mode require the user to confirm with eg. an --allow-destructive-rollbacks that this is okay to apply
/**
 * Applies a sequence of field-level operations (type changes, attribute
 * changes, explicit transforms, rollback restrictions) to a single field
 * definition, producing the updated field state plus the forward/backward
 * transforms computed for this migration step.
 *
 * @param {string} fieldName - Name of the field being modified (used for transform results).
 * @param {Object} [currentField] - Existing field state (schema, transform lists, rollback flag), if any.
 * @param {Array} operations - Parsed field operations to apply, in order.
 * @returns {Object} Updated field state with the new transforms appended.
 */
function applyFieldOperations(fieldName, currentField = {}, operations) {
	let workingSchema = currentField.schema ?? {};
	let previousForwardTransforms = currentField.forwardTransforms ?? [];
	let previousBackwardTransforms = currentField.backwardTransforms ?? [];
	let rollbackForbidden = currentField.rollbackForbidden ?? false; // FIXME: Actually use this as a safeguard in migrations
	let forwardComputer = createTransformComputer();
	let backwardComputer = createTransformComputer();
	let explicitForwardTransform = null;
	let explicitBackwardTransform = null;

	for (let operation of operations) {
		matchValue(operation.type, {
			setFieldType: () => {
				forwardComputer.changeType(workingSchema.type, operation.fieldType);
				backwardComputer.changeType(operation.fieldType, workingSchema.type);
				// NOTE: The logic for this is separated out into its own function because a bunch of complexity is needed for determining which attributes can be kept
				workingSchema = changeType(workingSchema, operation.fieldType);
			},
			setAttribute: () => {
				let currentValue = workingSchema[operation.attribute];
				if (currentValue === operation.value) {
					// FIXME: Error quality
					throw new Error(`Tried to change '${operation.attribute}' attribute to '${operation.value}', but it's already set to that`);
				}
				forwardComputer.changeAttribute(operation.attribute, currentValue, operation.value);
				backwardComputer.changeAttribute(operation.attribute, operation.value, currentValue);
				workingSchema = { ... workingSchema, [operation.attribute]: operation.value };
			},
			transformTo: () => {
				if (explicitForwardTransform != null) {
					// FIXME: Error quality
					throw new Error(`You can only specify one transformTo per field per migration`);
				}
				explicitForwardTransform = operation.transformer;
			},
			rollbackTo: () => {
				if (explicitBackwardTransform != null) {
					// FIXME: Error quality
					throw new Error(`You can only specify one rollbackTo per field per migration`);
				}
				explicitBackwardTransform = operation.transformer;
			},
			forbidRollback: () => {
				rollbackForbidden = true;
			}
			// TODO: rest of operations
		});
	}

	return {
		schema: workingSchema,
		forwardTransforms: previousForwardTransforms.concat([ forwardComputer.getResults(explicitForwardTransform, true, fieldName) ]),
		backwardTransforms: previousBackwardTransforms.concat([ backwardComputer.getResults(explicitBackwardTransform, false, fieldName) ]),
		rollbackForbidden: rollbackForbidden,
	};
}
/**
 * Evaluates a list of migrations into an accumulated schema object by
 * converting the migration tree into flat operations and folding each
 * operation into the schema via immutable deep merges.
 *
 * @param {Array} migrationList - Migration definitions (tree form, parsed per operationTreeRules).
 * @param {Object} [initial] - Schema state to start folding from; defaults to an empty schema.
 * @returns {Object} The evaluated schema after applying all operations.
 */
module.exports = function evaluateMigrations(migrationList, initial = {}) {
	/* NOTE: This function isn't always called with the *full* set of migrations from the start; it can also be called with just a subset of them, to determine the exact transformations from one given schema revision to another.
	De facto, the evaluated schema resulting from this function is divided into two parts:
	1. The absolute schema; this defines what the final total schema looks like, and must always consider *all* migrations from the very start, even when only evaluating a subset.
	2. The schema delta; this describes the (data) mutations introduced by this specific subset of migrations, and *not* those of previous migrations. This still requires the previous absolute schema to function, otherwise it would not be possible to determine the data mutations caused by the first migration in the evaluated set.
	In practice, this means that we initialize the merge operation with a copy of the schema as it existed prior to this evaluated set - but *only* the absolute component of that schema, not the schema delta. Then after evaluation, the schema delta will only contain those transformations introduced by this subset.
	// FIXME: Actually do the initializing stuff described here, lol
	*/
	// Flatten the migration tree into an ordered list of operation objects
	let operations = treeToOperations(operationTreeRules, migrationList);
	// Fold each operation into the accumulated schema; `last` is the schema so far
	return operations.reduce((last, operation) => {
		return matchValue(operation.type, {
			// NOTE(review): the function values in these merge templates appear to act as
			// per-key transformers that receive the current value at that key -- confirm
			// against the immutable-deep-merge package's contract.
			createCollection: () => immutableDeepMerge(last, {
				collections: {
					[operation.name]: (collection) => {
						// A collection being created must not already exist
						assert(collection === undefined); // FIXME: Produce a useful error here
						return {};
					}
				}
			}),
			deleteCollection: () => {
				throw new Error(`Not implemented yet`);
			},
			createField: () => immutableDeepMerge(last, {
				collections: {
					[operation.collection]: {
						fields: {
							[operation.name]: (field) => {
								// A field being created must not already exist; start from an empty field state
								assert(field === undefined); // FIXME: Produce a useful error here
								return applyFieldOperations(operation.name, {}, operation.operations);
							}
						}
					}
				}
			}),
			modifyField: () => immutableDeepMerge(last, {
				collections: {
					[operation.collection]: {
						fields: {
							[operation.name]: (field) => {
								// A field being modified must already exist; fold operations into its current state
								assert(field !== undefined); // FIXME: Produce a useful error here
								return applyFieldOperations(operation.name, field, operation.operations);
							}
						}
					}
				}
			}),
			deleteField: () => {
				throw new Error(`Not implemented yet`);
			},
			createIndex: () => {
				throw new Error(`Not implemented yet`);
			},
			deleteIndex: () => {
				throw new Error(`Not implemented yet`);
			}
		});
	}, initial);
	/* MARKER: Turn the resulting migration tree into 3 different views:
	- final schema form, for encoding new entries
	- structural changes, eg. newly created collections that need to be registered in some way before records are even generated for them
	- record transformations, ie. a definition of how records in older schema versions need to have their values/structure transformed to conform to the new schema
	Also think about how to deal with index roll-over, eg. a new runtime-generated index because of changing tzdb data
	*/
};