WIP (optimizer infrastructure)
parent
2eb636924c
commit
b5d0ca7c53
@ -1,3 +1,6 @@
|
||||
{
	"extends": "@joepie91/eslint-config",
	"parserOptions": {
		"ecmaVersion": 2020
	}
}
|
||||
|
@ -0,0 +1,37 @@
|
||||
"use strict";

const benchmark = require("benchmark");

const { select, where, anyOf, allOf, lessThan, moreThan, unsafeSQL, parameter } = require("../src/operations");

// Benchmark for the cost of *constructing* a query AST (no serialization or
// execution); exercises nested anyOf/allOf, raw SQL, and parameters.
let suite = new benchmark.Suite();

suite
	.add("Build standard query", () => {
		// NOTE: Assigned but never read on purpose; only construction is measured
		let query = select("projects", [
			where({
				foo: anyOf([ "bar", "baz", anyOf([ "bar2", "baz2" ]), unsafeSQL("TRUE") ]),
				qux: anyOf([ 13, moreThan(42) ]),
				complex: anyOf([
					30,
					40,
					allOf([
						moreThan(100),
						lessThan(200),
						lessThan(parameter("max"))
					])
				])
			}),
			where({ second: 2 })
		]);
	})
	.on("cycle", (event) => {
		console.log(String(event.target));
	})
	.on("complete", function () {
		// NOTE: Regular function, since benchmark.js supplies the suite as `this`
		console.log(`Fastest is ${this.filter("fastest").map("name")}`);
	})
	.run({
		async: true,
		minSamples: 500
	});
|
@ -0,0 +1,182 @@
|
||||
"use strict";
|
||||
|
||||
const util = require("util");
|
||||
const syncpipe = require("syncpipe");
|
||||
const debug = require("debug");
|
||||
|
||||
const NoChange = require("../../optimizers/util/no-change");
|
||||
const RemoveNode = require("../../optimizers/util/remove-node");
|
||||
const unreachable = require("../../unreachable");
|
||||
const typeOf = require("../../type-of");
|
||||
const measureTime = require("../../measure-time");
|
||||
|
||||
// FIXME: Consider deepcopying the tree once, and then mutating that tree, instead of doing everything immutably; this might be significantly faster when a few iterations are needed to stabilize the tree, as that might otherwise result in many copies of the subtree(s) leeding up to the changed node(s), one for each iteration.
|
||||
// FIXME: Consider whether inverting the evaluation order (deepest-first rather than shallowest-first) can remove the need for multiple optimization passes and stabilization detection.
|
||||
// FIXME: Verify that changed nodes actually result in a change in where the walker goes!
|
||||
|
||||
function createDebuggers(optimizers) {
	// Builds one namespaced `debug` logger per optimizer, keyed by optimizer name.
	return Object.fromEntries(
		optimizers.map((optimizer) => [ optimizer.name, debug(`raqb:ast:optimize:${optimizer.name}`) ])
	);
}
|
||||
|
||||
function createTimings(optimizers) {
	// Initializes a per-optimizer time accumulator; BigInt nanoseconds, starting at 0n.
	return Object.fromEntries(
		optimizers.map((optimizer) => [ optimizer.name, 0n ])
	);
}
|
||||
|
||||
function combineOptimizers(optimizers) {
	// Merges the visitor maps of all optimizers into a single mapping of
	// node type -> [ { name, func } ], preserving optimizer order per type.
	let allVisitors = {};

	for (let optimizer of optimizers) {
		for (let [ nodeType, visitorFunc ] of Object.entries(optimizer.visitors)) {
			let visitorList = allVisitors[nodeType];

			if (visitorList == null) {
				visitorList = [];
				allVisitors[nodeType] = visitorList;
			}

			visitorList.push({
				name: optimizer.name,
				func: visitorFunc
			});
		}
	}

	return allVisitors;
}
|
||||
|
||||
// FIXME: StopMatching marker to signal that eg. a generic visitor should no longer match after a specific one?
|
||||
// FIXME: OriginalNode marker to explicitly indicate that any transformations applied by *other* visitors should be thrown out?
|
||||
|
||||
/**
 * Walks the AST shallowest-first and applies every registered optimizer's
 * visitors to each node, repeating on replacement nodes until they stabilize.
 *
 * Visitors must return one of: a NoChange marker (leave node untouched), a
 * RemoveNode marker (delete the node), or a *new* AST node (replace the node).
 * Returning null/undefined or the original node itself is an error.
 *
 * Returns { ast, timings }, where `timings` maps optimizer names to BigInt
 * nanoseconds, plus "# Total" and "# Walker overhead" synthetic entries.
 */
module.exports = function optimizeTree(ast, optimizers) {
	// NOTE: Depth-first!
	let visitors = combineOptimizers(optimizers);
	let timings = createTimings(optimizers);
	let debuggers = createDebuggers(optimizers);
	// FIXME: Dirty tracking for stabilization detection

	// Applies each visitor in order to `node`; stops early as soon as one
	// visitor replaces or removes the node, since the rest would then be
	// operating on a stale node.
	function applyVisitors(node, visitors) {
		if (visitors == null) {
			// We handle this here to make the `handle` pipeline more readable
			return node;
		} else {
			let lastNode = node;

			for (let visitor of visitors) {
				// eslint-disable-next-line no-loop-func
				let { value: result, time } = measureTime(() => {
					return visitor.func(lastNode);
				});

				// Attribute the time spent to the optimizer that owns this visitor
				timings[visitor.name] += time;

				if (result === NoChange) {
					// no-op
				} else if (result == null) {
					throw new Error(`A visitor is not allowed to return null or undefined; if you intended to leave the node untouched, return a NoChange marker instead`);
				} else if (result === RemoveNode) {
					debuggers[visitor.name](`Node of type '${typeOf(lastNode)}' removed`);
					lastNode = RemoveNode;
					break; // Node has gone stale, stop applying visitors to it
				} else if (result.__raqbASTNode === true) {
					// New subtree to replace the old one
					if (result === node) {
						// Visitor returned the original node again; but in this case, it should return NoChange instead. We enforce this because after future changes to the optimizer implementation (eg. using an internally-mutable deep copy of the tree), we may no longer be able to *reliably* detect when the original node is returned; so it's best to already get people into the habit of returning a NoChange marker in those cases, by disallowing this.
						throw new Error(`Visitor returned original node, but this may not work reliably; if you intended to leave the node untouched, return a NoChange marker instead`);
					} else {
						debuggers[visitor.name](`Node of type '${typeOf(lastNode)}' replaced by node of type '${typeOf(result)}'`);
						lastNode = result;
						break; // Node has gone stale, stop applying visitors to it
					}
				} else {
					throw new Error(`Visitor returned an unexpected type of return value: ${util.inspect(result)}`);
				}
			}

			if (lastNode !== node) {
				// We re-evaluate the new node before leaving control to the children handler, as the old one has been substituted, and therefore new visitors might be applicable.
				return handleSelf(lastNode);
			} else {
				return lastNode;
			}
		}
	}

	// Runs type-specific visitors first, then wildcard ("*") visitors.
	function handleSelf(node) {
		return syncpipe(node, [
			(_) => applyVisitors(_, visitors[_.type]),
			(_) => applyVisitors(_, visitors["*"]),
		]);
	}

	// Recurses into all AST-node-valued properties (single nodes and arrays of
	// nodes); returns the original node object when nothing below it changed.
	function handleChildren(node) {
		// FIXME: Eventually hardcode the available properties for different node types (and them being single/multiple), for improved performance?
		let changedProperties = {};

		for (let [ property, value ] of Object.entries(node)) {
			if (value == null) {
				continue;
			} else if (value.__raqbASTNode === true) {
				changedProperties[property] = handle(value);
			} else if (Array.isArray(value) && value.length > 0 && value[0].__raqbASTNode === true) {
				// NOTE: We assume that if an array in an AST node property contains one AST node, *all* of its items are AST nodes. This should be ensured by the input wrapping in the operations API.
				changedProperties[property] = value
					.map((item) => handle(item))
					.filter((item) => item !== RemoveNode);
			} else {
				// Probably some kind of literal value; we don't touch these.
				continue;
			}
		}

		if (Object.keys(changedProperties).length === 0) {
			return node;
		} else {
			// Immutable update: shallow-copy the node with the changed children
			let newNode = Object.assign({}, node, changedProperties);

			// FIXME: Think carefully about whether there is *ever* a valid reason to remove a single node! As array items are already taken care of above, and leave an empty array at worst, which can make sense. Possibly we even need to encode this data into node type metadata.
			for (let [ key, value ] of Object.entries(newNode)) {
				if (value === RemoveNode) {
					delete newNode[key];
				}
			}

			return newNode;
		}
	}

	// Full treatment for one node: itself first (shallowest-first), then its children.
	function handle(node) {
		// FIXME: Possibly optimize the "node gets returned unchanged" case, somehow? Perhaps by propagating the NoChange marker? But object creation is fast, so that may actually make things slower than just blindly creating new objects...
		return syncpipe(node, [
			(_) => handleSelf(_),
			(_) => handleChildren(_)
		]);
	}

	let { value: rootNode, time } = measureTime(() => {
		return handle(ast);
	});

	// Total visitor time; subtracted from wall-clock time to estimate walker overhead
	let timeSpentInOptimizers = Object.values(timings).reduce((sum, n) => sum + n, 0n);

	if (rootNode !== RemoveNode) {
		return {
			ast: rootNode,
			timings: {
				"# Total": time,
				"# Walker overhead": time - timeSpentInOptimizers,
				... timings,
			}
		};
	} else {
		// Visitors are never expected to remove the root node itself
		unreachable("Root node was removed");
	}
};
|
@ -0,0 +1,9 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = function evaluateCyclicalModulesOnto(resultObject = {}, moduleMapping) {
|
||||
for (let [ key, moduleInitializer ] of Object.entries(moduleMapping)) {
|
||||
resultObject[key] = moduleInitializer(resultObject);
|
||||
}
|
||||
|
||||
return resultObject;
|
||||
};
|
@ -0,0 +1,5 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = function filterType(type, allNodes) {
|
||||
return allNodes.filter((node) => node.type === type);
|
||||
};
|
@ -0,0 +1,25 @@
|
||||
"use strict";
|
||||
|
||||
const { validateOptions } = require("@validatem/core");
|
||||
const required = require("@validatem/required");
|
||||
const oneOf = require("@validatem/one-of");
|
||||
const arrayOf = require("@validatem/array-of");
|
||||
|
||||
const node = require("../ast-node");
|
||||
|
||||
// Factory: takes the (cyclically-constructed) operations registry and produces
// the internal `_arrayOf` operation, which wraps a homogeneous list of value
// expressions as an anyOf/allOf array node.
module.exports = function (operations) {
	// Required here rather than at module level, because the validator itself
	// needs access to the operations registry (cyclical dependency)
	const isValueExpression = require("../validators/operations/is-value-expression")(operations);

	return function _arrayOf(_options) {
		// NOTE: `validateOptions` inspects `arguments` directly; the `_options`
		// parameter only exists to document the expected call shape
		let { type, items } = validateOptions(arguments, {
			type: [ required, oneOf([ "anyOf", "allOf" ]) ],
			items: [ required, arrayOf(isValueExpression) ]
		});

		return node({
			type: "_arrayOf",
			listType: type,
			items: items
		});
	};
};
|
@ -0,0 +1,26 @@
|
||||
"use strict";
|
||||
|
||||
const { validateOptions } = require("@validatem/core");
|
||||
const required = require("@validatem/required");
|
||||
const oneOf = require("@validatem/one-of");
|
||||
const either = require("@validatem/either");
|
||||
|
||||
const node = require("../ast-node");
|
||||
|
||||
// Factory: takes the (cyclically-constructed) operations registry and produces
// the internal `_condition` operation, which wraps a comparison (lessThan /
// moreThan / equals) on a value expression or internal array node.
module.exports = function (operations) {
	// Required here rather than at module level, because these validators need
	// access to the operations registry (cyclical dependency)
	const isInternalArrayType = require("../validators/operations/is-internal-array-type")(operations);
	const isValueExpression = require("../validators/operations/is-value-expression")(operations);

	return function _condition(_options) {
		// NOTE: `validateOptions` inspects `arguments` directly; the `_options`
		// parameter only exists to document the expected call shape
		let { type, expression } = validateOptions(arguments, {
			type: [ required, oneOf([ "lessThan", "moreThan", "equals" ]) ],
			expression: [ required, either([ isValueExpression, isInternalArrayType ]) ]
		});

		return node({
			type: "condition",
			conditionType: type,
			expression: expression
		});
	};
};
|
@ -0,0 +1,14 @@
|
||||
"use strict";
|
||||
|
||||
const operations = require("../operations");
const evaluateCyclicalModulesOnto = require("../evaluate-cyclical-modules-onto");

// Shallow clone, so that internal operations can see the public API, but not vice versa
let internalOperations = Object.assign({}, operations);

// Each internal operation is a factory taking the registry itself, so that the
// operations can reference each other despite the circular dependency.
evaluateCyclicalModulesOnto(internalOperations, {
	_condition: require("./condition"),
	_arrayOf: require("./array-of"),
});

module.exports = internalOperations;
|
@ -0,0 +1,12 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = function measureTime(func) {
|
||||
let startTime = process.hrtime.bigint();
|
||||
let result = func();
|
||||
let endTime = process.hrtime.bigint();
|
||||
|
||||
return {
|
||||
value: result,
|
||||
time: (endTime - startTime)
|
||||
};
|
||||
};
|
@ -0,0 +1,133 @@
|
||||
"use strict";
|
||||
|
||||
const matchValue = require("match-value");
|
||||
const flatten = require("flatten");
|
||||
const syncpipe = require("syncpipe");
|
||||
|
||||
const operations = require("../operations");
|
||||
const internalOperations = require("../internal-operations");
|
||||
const concat = require("../concat");
|
||||
const NoChange = require("./util/no-change");
|
||||
|
||||
// FIXME: Have some sort of internally-cacheable way to find nodes of a certain type? So that different optimizer visitors don't need to filter the list of clauses over and over again...
|
||||
|
||||
function leftIdentity(left) {
	// Produces a string uniquely identifying the left-hand side of an expression,
	// or null when this left-hand type has no stable identity to group on.
	// NOTE: This uses JSON.stringify, since that gives us fast escaping for free; which is important to prevent bugs and/or injection-related security issues in the serialized names
	switch (left.type) {
		case "columnName":
			return `column:${JSON.stringify(left.name)}`;
		case "foreignColumnName":
			return `foreignColumn:${JSON.stringify([ left.table.name, left.column.name ])}`;
		case "sqlExpression":
			return `sqlExpression:${JSON.stringify(left.expression)}`;
		default:
			return null;
	}
}
|
||||
|
||||
function createExpressionTracker() {
	// Groups expressions by (left-hand identity, condition type), so that any
	// group with more than one member can later be collapsed into a single
	// array-valued condition.
	let leftMapping = new Map();
	let hasSeenPossibleArray = false;

	function addExpression(expression) {
		let identity = leftIdentity(expression.left);
		let conditionType = expression.condition.conditionType;

		if (!leftMapping.has(identity)) {
			leftMapping.set(identity, new Map());
		}

		let conditionTypeMapping = leftMapping.get(identity);

		if (conditionTypeMapping.has(conditionType)) {
			// Both the left identity and conditionType match, so this can be turned into an array
			hasSeenPossibleArray = true;
		} else {
			conditionTypeMapping.set(conditionType, []);
		}

		// We store the entire original expression object, so that the new-node-generation code can pick out the expression metadata later. Since everything is grouped by identity and condition type, that code can just assume that the metadata of the first item in the list (if there's more than one) applies to *all* of the items in that list.
		conditionTypeMapping.get(conditionType).push(expression);
	}

	return {
		addExpression: addExpression,
		getMapping: function () {
			return leftMapping;
		},
		arrayIsPossible: function () {
			return hasSeenPossibleArray;
		}
	};
}
|
||||
|
||||
// Builds the visitor for one predicate-list node type ("anyOfExpressions" or
// "allOfExpressions"); the visitor merges same-column, same-condition
// expressions into a single array-valued condition.
function createHandler(type) {
	// FIXME: Improve matchValue to distinguish between "arm not specified at all" and "arm holds undefined as a specified value", to deal with things like accidental operations.anyOfExpressions
	let expressionOperation = matchValue.literal(type, {
		"anyOfExpressions": operations.anyOf,
		"allOfExpressions": operations.allOf
	});

	let internalArrayType = matchValue(type, {
		"anyOfExpressions": "anyOf",
		"allOfExpressions": "allOf"
	});

	return function arrayifyPredicateList(node) {
		// FIXME: Also detect non-parameterizable cases like raw SQL!
		let tracker = createExpressionTracker();
		// NOTE: Removed a leftover `console.log(node)` debug statement here

		for (let item of node.items) {
			// Only regular expressions can be arrayified, not {all,any}OfExpressions, which will get visited by this optimizer later on anyway
			// FIXME: Also ignore already-processed arrays
			if (item.type === "expression") {
				tracker.addExpression(item);
			}
		}

		if (tracker.arrayIsPossible()) {
			// For each (identity, conditionType) group: keep singletons as-is, and
			// collapse larger groups into one expression over an internal array node
			let newExpressions = syncpipe(tracker, [
				(_) => _.getMapping(),
				(_) => Array.from(_.values()),
				(_) => _.map((conditionMapping) => syncpipe(conditionMapping, [
					(_) => Array.from(_.entries()),
					(_) => _.map(([ conditionType, expressions ]) => {
						if (expressions.length === 1) {
							return expressions[0];
						} else {
							let allValues = expressions.map((expression) => expression.condition.expression);

							return operations.expression({
								left: expressions[0].left,
								condition: internalOperations._condition({
									type: conditionType,
									expression: internalOperations._arrayOf({
										type: internalArrayType,
										items: allValues
									})
								})
							});
						}
					})
				])),
				(_) => flatten(_)
			]);

			// Non-expression items (eg. nested predicate lists) pass through untouched
			let untouchedExpressions = node.items.filter((item) => item.type !== "expression");

			return expressionOperation(concat([
				newExpressions,
				untouchedExpressions
			]));
		} else {
			return NoChange;
		}
	};
}
|
||||
|
||||
// Optimizer: collapses multiple expressions with the same left-hand side and
// condition type inside an anyOf/allOf list into a single array-valued
// condition, improving readability of the resulting query.
module.exports = {
	name: "arrayify-predicate-lists",
	category: [ "readability" ],
	visitors: {
		allOfExpressions: createHandler("allOfExpressions"),
		anyOfExpressions: createHandler("anyOfExpressions"),
	}
};
|
@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
|
||||
const splitFilter = require("split-filter");
|
||||
|
||||
const operations = require("../operations");
|
||||
const NoChange = require("./util/no-change");
|
||||
|
||||
// FIXME: Have some sort of internally-cacheable way to find nodes of a certain type? So that different optimizer visitors don't need to filter the list of clauses over and over again...
|
||||
|
||||
module.exports = {
|
||||
name: "collapse-where",
|
||||
category: [ "normalization" ],
|
||||
visitors: {
|
||||
select: ({ table, clauses }) => {
|
||||
let [ whereClauses, otherClauses ] = splitFilter(clauses, (clause) => clause.type === "where");
|
||||
|
||||
if (whereClauses.length > 1) {
|
||||
let whereExpressions = whereClauses.map((clause) => clause.expression);
|
||||
let newWhere = operations.where(operations.allOf(whereExpressions));
|
||||
|
||||
return operations.select(table, [ newWhere ].concat(otherClauses));
|
||||
} else {
|
||||
return NoChange;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
@ -0,0 +1,46 @@
|
||||
"use strict";
|
||||
|
||||
const matchValue = require("match-value");
|
||||
|
||||
const operations = require("../operations");
|
||||
const typeOf = require("../type-of");
|
||||
const unreachable = require("../unreachable");
|
||||
const NoChange = require("./util/no-change");
|
||||
|
||||
module.exports = {
|
||||
name: "conditions-to-expressions",
|
||||
category: [ "normalization" ],
|
||||
visitors: {
|
||||
expression: (rootNode) => {
|
||||
if (rootNode.condition.type === "condition") {
|
||||
return NoChange;
|
||||
} else {
|
||||
// anyOfConditions, allOfConditions, notCondition
|
||||
|
||||
function convertNode(node) {
|
||||
let listOperation = matchValue.literal(typeOf(node), {
|
||||
anyOfConditions: operations.anyOf,
|
||||
allOfConditions: operations.allOf,
|
||||
notCondition: null,
|
||||
condition: null
|
||||
});
|
||||
|
||||
if (listOperation != null) {
|
||||
return listOperation(node.items.map((item) => convertNode(item)));
|
||||
} else if (typeOf(node) === "notCondition") {
|
||||
return operations.notExpression(convertNode(node.condition));
|
||||
} else if (typeOf(node) === "condition") {
|
||||
return operations.expression({
|
||||
left: rootNode.left,
|
||||
condition: node
|
||||
});
|
||||
} else {
|
||||
unreachable(`Encountered node type '${node.type}' within condition modifier`);
|
||||
}
|
||||
}
|
||||
|
||||
return convertNode(rootNode.condition);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
@ -0,0 +1,52 @@
|
||||
"use strict";
|
||||
|
||||
const matchValue = require("match-value");
|
||||
|
||||
const operations = require("../operations");
|
||||
const typeOf = require("../type-of");
|
||||
const NoChange = require("./util/no-change");
|
||||
|
||||
// FIXME: Generalize to all predicate lists?
|
||||
|
||||
// Builds the visitor for one not-wrapper node type ("notCondition" or
// "notExpression").
function createHandler(type) {
	let subItemProperty = matchValue(type, {
		notCondition: "condition",
		notExpression: "expression"
	});

	return function flattenNotPredicates(expression) {
		// Flattens multiple levels of like-typed not(...) wrappers, ending up with a logically equivalent subtree
		// `notCondition(condition)` -> `notCondition(condition)`
		// `notCondition(notCondition(condition))` -> `condition`
		// `notCondition(notCondition(notCondition(condition)))` -> `notCondition(condition)`
		// etc.
		let notLevels = 0;
		let innermostItem = expression;

		while (typeOf(innermostItem) === type) {
			notLevels += 1;
			innermostItem = innermostItem[subItemProperty];
		}

		if (notLevels === 1) {
			// A single wrapper; nothing to flatten
			return NoChange;
		}

		// Odd nesting depth means one negation survives; even depth cancels out entirely
		let negationRemains = (notLevels % 2) === 1;

		return (negationRemains)
			? operations.not(innermostItem)
			: innermostItem;
	};
}
|
||||
|
||||
// Optimizer: collapses stacked not(...) wrappers of the same kind into at most
// one, since double negation cancels out.
module.exports = {
	name: "flatten-not-predicates",
	category: [ "readability" ],
	visitors: {
		notExpression: createHandler("notExpression"),
		notCondition: createHandler("notCondition")
	}
};
|
@ -0,0 +1,66 @@
|
||||
"use strict";
|
||||
|
||||
const matchValue = require("match-value");
|
||||
|
||||
const operations = require("../operations");
|
||||
const typeOf = require("../type-of");
|
||||
const NoChange = require("./util/no-change");
|
||||
const RemoveNode = require("./util/remove-node");
|
||||
|
||||
// FIXME: Generalize to all predicate lists?
|
||||
|
||||
// Builds the visitor for one predicate-list node type; flattens same-typed
// nested lists and unwraps single-item lists.
function createHandler(type) {
	let listOperation = matchValue.literal(type, {
		anyOfExpressions: operations.anyOf,
		allOfExpressions: operations.allOf,
		anyOfConditions: operations.anyOf,
		allOfConditions: operations.allOf,
	});

	return function flattenPredicateList(list) {
		let containsNestedList = list.items.some((item) => typeOf(item) === type);
		let isSingleItemList = (list.items.length === 1); // For unnecessary anyOf/allOf wrapping, which is also handled by this optimizer

		if (!containsNestedList && !isSingleItemList) {
			return NoChange;
		}

		// Depth-first collection of every non-list item, in original order
		let flattenedItems = [];

		const collectItemsRecursively = (node) => {
			for (let subItem of node.items) {
				if (typeOf(subItem) === type) {
					collectItemsRecursively(subItem);
				} else {
					flattenedItems.push(subItem);
				}
			}
		};

		collectItemsRecursively(list);

		if (flattenedItems.length === 0) {
			// FIXME: Do we want to log this as a warning? It *could* occur when items get eliminated by another optimizer, but it could also be the result of a bug...
			console.warn("Encountered 0 actual items in predicate list");

			return RemoveNode;
		} else if (flattenedItems.length === 1) {
			// Wrapping is pointless here.
			return flattenedItems[0];
		} else {
			return listOperation(flattenedItems);
		}
	};
}
|
||||
|
||||
// Optimizer: recursively flattens nested anyOf/allOf lists of the same type
// into their parent list, and unwraps lists that contain only a single item.
module.exports = {
	name: "flatten-predicate-lists",
	category: [ "readability" ],
	visitors: {
		allOfExpressions: createHandler("allOfExpressions"),
		anyOfExpressions: createHandler("anyOfExpressions"),
		allOfConditions: createHandler("allOfConditions"),
		anyOfConditions: createHandler("anyOfConditions"),
	}
};
|
@ -0,0 +1,3 @@
|
||||
"use strict";
|
||||
|
||||
// Unique marker returned by optimizer visitors to signal "leave this node untouched".
module.exports = Symbol("NoChange");
|
@ -0,0 +1,3 @@
|
||||
"use strict";
|
||||
|
||||
// Unique marker returned by optimizer visitors to signal "delete this node from the tree".
module.exports = Symbol("RemoveNode");
|
@ -1,5 +1,5 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = function unreachable(reason) {
|
||||
throw new Error(`This code should never be run: ${reason}; this is a bug in raqb, please report it!`);
|
||||
throw new Error(`${reason}; this is a bug in raqb, please report it!`);
|
||||
};
|
||||
|
@ -1,12 +1,7 @@
|
||||
"use strict";
|
||||
|
||||
const either = require("@validatem/either");
|
||||
|
||||
module.exports = function (operations) {
|
||||
const isObjectType = require("./is-object-type")(operations);
|
||||
|
||||
return either([
|
||||
[ isObjectType("_internalAnyOfArray") ],
|
||||
[ isObjectType("_internalAllOfArray") ],
|
||||
]);
|
||||
return [ isObjectType("_arrayOf") ];
|
||||
};
|
||||
|
Loading…
Reference in New Issue