Initial commit

commit a795bb5a06
@@ -0,0 +1,2 @@
node_modules
junk
@@ -0,0 +1,11 @@
# dlayer

## This library (and its documentation) is still a work-in-progress!

dlayer is a GraphQL-like data access layer. It lets you combine data from multiple sources into a single coherent API - and it's designed for application-internal use first and foremost, with network accessibility being an optional add-on.

dlayer differs from GraphQL in a number of important ways:

- dlayer supports recursive queries, both bounded and unbounded, without needing schema hacks.
- dlayer is modular by default; a dlayer API can be composed of many independent 'modules' that can reference and extend each other when available. This allows for eg. constructing plugin systems, like in [sysquery](FIXME), as well as making it easier to loosely couple your API definition.
- dlayer does not use a separate schema; the schema is implicitly defined by what the API returns. This essentially makes it dynamically typed; however, trying to access a non-existent property of the schema is an error.
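
A minimal sketch of what usage looks like, based on the API in this initial commit (details may still change while this is a work-in-progress):

```js
"use strict";

const dlayer = require("dlayer");

// Define an API; property handlers can be plain values, functions, or async functions
let api = dlayer({
	schema: {
		system: {
			hostname: () => "example",
			uptimeSeconds: async () => 12345
		}
	}
});

// Query it with a nested object that mirrors the shape of the data you want
api.query({
	system: {
		hostname: true,
		uptimeSeconds: true
	}
}).then((result) => {
	console.log(result);
	// { system: { hostname: 'example', uptimeSeconds: 12345 } }
});
```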
@@ -0,0 +1,78 @@
"use strict";

// Simple data type to represent a query path and corresponding schema path tied together, because these are basically always used together, and it would bloat up the implementation code otherwise

function createInstance({ queryPath, schemaPath, queryObject, schemaObject, parent }) {
	let self;
	// eslint-disable-next-line no-return-assign
	return self = {
		queryPath: queryPath,
		schemaPath: schemaPath,
		query: queryObject,
		schema: schemaObject,
		child: function (queryKey) {
			let newQueryPath = (queryKey != null)
				? queryPath.concat([ queryKey ])
				: queryPath;

			let newQueryObject = (queryKey != null)
				? queryObject[queryKey]
				: queryObject;

			// TODO: Is this correct even when the queryKey is null, and so we remain at the same object?
			// $key is used for handling aliases
			let effectiveSchemaKey = (newQueryObject?.$key != null)
				? newQueryObject.$key
				: queryKey;

			let newSchemaPath = (effectiveSchemaKey != null)
				? schemaPath.concat([ effectiveSchemaKey ])
				: schemaPath;

			let newSchemaObject = (effectiveSchemaKey != null)
				? schemaObject[effectiveSchemaKey]
				: schemaObject;

			return createInstance({
				queryPath: newQueryPath,
				schemaPath: newSchemaPath,
				queryObject: newQueryObject,
				schemaObject: newSchemaObject,
				parent: self
			});
		},
		parent: parent,
		override: function ({ query, schema }) {
			return createInstance({
				queryPath: queryPath,
				schemaPath: schemaPath,
				queryObject: query ?? queryObject,
				schemaObject: schema ?? schemaObject,
				// An override doesn't change the path, so the parent shouldn't change either
				parent: self.parent
			});
		},
		toPathString: function () {
			return queryPath
				.map((segment, i) => {
					if (segment === schemaPath[i]) {
						return segment;
					} else {
						// This is used for representing aliases, showing the original schema key in brackets
						return `${segment} [${schemaPath[i]}]`;
					}
				})
				.join(" -> ");
		}
	};
}

module.exports = function createCursor({ query, schema }) {
	return createInstance({
		queryPath: [],
		schemaPath: [],
		queryObject: query,
		schemaObject: schema,
		parent: undefined
	});
};
@@ -0,0 +1,38 @@
"use strict";

// A strict deep-merging implementation that *only* merges regular objects, and prevents prototype pollution

function isObject(value) {
	// TODO: Disallow special object types, for statically defined values (or just disallow specifying static values in the root schema in dlayer?)
	return (value != null && typeof value === "object" && !Array.isArray(value));
}

module.exports = function deepMerge(a, b) {
	let merged = Object.create(null);
	let keys = new Set([ ... Object.keys(a), ... Object.keys(b) ]);

	for (let key of keys) {
		// Technically over-blocks *any* 'constructor' key
		if (key === "__proto__" || key === "constructor") {
			continue;
		}

		let valueA = a[key];
		let valueB = b[key];

		if (isObject(valueA) && valueB === undefined) {
			merged[key] = valueA;
		} else if (isObject(valueB) && valueA === undefined) {
			merged[key] = valueB;
		} else if (isObject(valueA) && isObject(valueB)) {
			merged[key] = deepMerge(valueA, valueB);
		} else if (!isObject(valueA) && !isObject(valueB)) {
			merged[key] = valueB ?? valueA;
		} else {
			// FIXME: Identifiable error type, and include the error path as well
			throw new Error("Cannot merge non-object into object");
		}
	}

	return merged;
};
@@ -0,0 +1,48 @@
A module system for your data sources.

```js
{
	system: {
		metrics: {
			loadAverage: true
		},
		hardware: {
			drives: {
				path: true,
				size: true
			}
		},
		lvm: {
			physicalVolumes: {
				$collection: {
					$$create: { path: "/dev/sda1" }
				},
				path: true,
				$$update: { enabled: true }
			}
		}
	}
}
```

# Special schema keys

- `$anyKey`: specifies a wildcard/fallback handler that will handle *any* property specified in the query that isn't explicitly specified in the schema. Use sparingly, as it poses API design/maintenance hazards; it will be difficult to add new explicit keys later on without breaking existing queries.
- `$mutations`: used to specify mutation methods that the user can call on objects. Specified as an object that maps mutation names to their corresponding functions.
- `{ $get, $mutations }`: specified as an object in place of where normally a function or literal value would be expected to be returned; specifically meant for collections where you want to both allow the collection to be fetched, *and* for the user to specify collection-related mutations (eg. "create new item") within it in a query. This special syntax exists because collections/lists are "transparent" in dlayer and there is no way to specify collection-level behaviour otherwise. `$mutations` works the same as in its standalone version. A sketch of a schema fragment using these keys follows below.
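
A sketch, purely for illustration: the `fetchTodoLists` and `createTodoList` helpers are hypothetical, and the exact mutation semantics are still subject to change while this library is a work-in-progress.

```js
{
	todoLists: {
		// Fetch handler for the collection itself
		$get: function ({ archived }, context) {
			return fetchTodoLists({ archived });
		},
		// Collection-level mutations
		$mutations: {
			createTodoList: function ({ name }, context) {
				return createTodoList({ name });
			}
		}
	},
	// Fallback handler for any queried property that isn't explicitly defined here
	$anyKey: function (args, context) {
		return null;
	}
}
```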

# Special query keys

- `$recurse`: set to `true` if you want the query to recursively traverse nested objects; eg. `... children: { $recurse: true } ...`. The recursive query will be for all the keys of the *parent* object, as well as any additional properties specified adjacent to the `$recurse` key. The key under which this special object is specified determines what key is assumed to contain the recursive children.
- `$recurseLimit` (default `10`): how many levels deep the recursion may continue before being cut off. Reaching the limit does not fail the query; it merely stops recursing any further.
- `$allowErrors`: signals that this property may yield an error upon evaluation *without* failing the query as a whole. Instead of returning the value for the property directly, a Result object will be produced that represents either the success value or the error that was encountered. Your code still needs to handle the error case in some way. Typically useful when components of the query are expected to fail due to external circumstances that the requesting user cannot control (eg. a dependency on another network service).
- `$arguments`: used to specify an object of named arguments for either a property or a mutation. Optional for properties; required for mutations (even if left empty).
- `$key`: used to override what schema key to fetch; it will attempt to access the specified key, instead of assuming that the schema key equals the key in the query. Typically used to either alias properties (using a different name in the response than how it is defined in the schema), or to access the same property multiple times with different arguments (eg. filters) and expose the results as different keys.
- `$collection`: used to invoke mutations on a (transparent) collection instead of its members. A sketch of a query combining several of these keys follows below.
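
A sketch with hypothetical property names:

```js
{
	largeDrives: {
		// Alias: fetches the schema's `drives` property under a different response key
		$key: "drives",
		$arguments: { minimumSize: "1 TiB" },
		path: true,
		// May fail without failing the whole query; produces a Result object instead
		smartStatus: { $allowErrors: true },
		children: {
			// Recursively fetch nested children, at most 3 levels deep
			$recurse: true,
			$recurseLimit: 3
		}
	}
}
```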

# Special context keys

These keys can be used within property handlers; a short sketch follows below the list.

- `$getProperty(object, property)`: evaluate/fetch a different property on the object, and return it (in a Promise). You can reference `this` within the property handler as the `object` to evaluate a property of the object you're currently working with.
- `$getPropertyPath(object, propertyPath)`: the same as above, but for a property *path* represented as an array of property names (or a dotpath string).
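
A sketch with a hypothetical `drive` property and `lookupDriveForPath` helper:

```js
{
	drive: async function (args, { $getProperty }) {
		// Fetch another property of the object we're currently working with
		let path = await $getProperty(this, "path");

		return lookupDriveForPath(path);
	}
}
```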
@@ -0,0 +1,322 @@
"use strict";

const Promise = require("bluebird");
const mapObject = require("map-obj");
const Result = require("@joepie91/result");

const createCursor = require("./cursor");
const deepMerge = require("./deep-merge");
const loadModules = require("./load-modules");

// TODO: Bounded/unbounded recursion
// TODO: Should we allow async context generation? Both in root schema *and* in modules
// TODO: $required query predicate
// TODO: Lazy query objects, which get initialized by calling a function that gets the parent object as an argument? This would not be serializable over the network!
// FIXME: $getProperty, $getPropertyPath, maybe $resolveObject/$query?
// FIXME: Allow setting an evaluation depth limit for queries, to limit eg. recursion
// FIXME: recurseDepth, recurseLabel/recurseGoto

/* Process design:

- The process starts with:
	1. A query tree, a nested object representing the query from the user
	2. A schema tree, an abstract tree of nested objects specified by the API; notably, the full tree is not known upfront, and parts may be discovered asynchronously or even generated dynamically
	3. A cursor pointing at the root
- All of these objects are immutable, including the cursor (a child cursor is created, you don't mutate the existing cursor)
- The cursor is an object that represents a specific position in the query *and* schema tree; it has no understanding of the API structure, nor any ability to evaluate any handlers. All it does is keep track of what part of the query and schema tree we're currently dealing with, accounting for aliases where appropriate, and ensuring that the two pointers move in tandem.
- We recurse into the query tree according to the query the user specified, moving along in the schema tree accordingly. Branches that are specified in the schema tree but not in the query tree, will not be visited.
- At each step along the way, we have a cursor pointing at the schema item being processed currently; that schema item can be some static subtree, or eg. a value generated by a previous handler.
- From that cursor, for each step, we generate an 'instruction'; this is a parsed representation of that query item's querying rules, as well as information on what to do with the result once that item has been evaluated. This instruction is used to evaluate the relevant handler and then continue recursing with child rules (or, in the case of an actual recursive query, duplicating the previous rules).
- From the result, we then generate a new cursor for the child items. And so on, and so forth.

*/

/* Recursion design:
When setting `$recurse: true` on a child property, the parent schema gets duplicated with the child schema merged into it, and the resulting combined schema is used for the recursive fetching. Because the child schema can explicitly set properties to false, this allows for both "fetch in parent but not in recursed children" cases (true in parent, false in child) and "fetch in recursed children but not in parent" cases (unspecified or false in parent, true in child).
The schema merging will eventually become deep-merging, when multi-level recursion is implemented (ie. the possibility to recurse indirectly).
*/
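
/* Illustrative example (not part of the implementation): given a query like
	{ name: true, children: { $recurse: true, extraField: true } }
each recursed child is fetched with the parent's keys merged with the keys specified next to `$recurse`, ie. effectively:
	{ name: true, extraField: true, children: { $recurse: true, extraField: true } }
so `name` is fetched at every level, `extraField` only on recursed children, and `children` keeps recursing.
*/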

const specialKeyRegex = /^\$[^\$]/;

function maybeCall(value, args, thisContext) {
	return Promise.try(() => {
		// FIXME: Only do this for actual fetch requests
		let getter = (typeof value === "object" && value != null && value.$get != null)
			? value.$get
			: value;

		if (typeof getter === "function") {
			return getter.call(thisContext, ...args);
		} else {
			return getter;
		}
	});
}

function isObject(value) {
	// FIXME: Replace this with a more sensible check, like is-plain-object
	return (value != null && typeof value === "object" && !Array.isArray(value));
}

// TODO: Move to separate package, decide whether to keep the nested array detection or not - that should probably just be part of the handler?
function mapMaybeArray(value, handler) {
	// NOTE: This is async!
	if (Array.isArray(value)) {
		return Promise.map(value, (item, i) => {
			if (Array.isArray(item)) {
				throw new Error(`Encountered a nested array, which is not allowed; maybe you forgot to flatten it?`);
			} else {
				return handler(item, i);
			}
		});
	} else {
		return handler(value);
	}
}

/* Possible values of a schema property:
	true, null, object with only special keys (but not $recurse) -- fetch value and return it as-is
	false -- do not fetch value at all
	object with $recurse -- recursively fetch, optionally with extra keys to fetch (or ignore) for recursed children only, inheriting the rest of the schema from the parent
	object with regular non-special keys -- fetch object and continue fetching into child properties according to the schema

	a "special key" is any key that is prefixed with $ - they are used to provide additional parameters to dlayer, and cannot be used for business logic keys
*/

function asyncMapObject(object, handler) {
	return Promise.props(mapObject(object, handler));
}

function analyzeSubquery(subquery) {
	let isRecursive = (subquery?.$recurse === true);
	let allowErrors = (subquery?.$allowErrors === true);
	let hasChildKeys = isObject(subquery) && Object.keys(subquery).some((key) => !specialKeyRegex.test(key));
	let isLeaf = (subquery === true || subquery === null || (!hasChildKeys && !isRecursive));
	let args = subquery?.$arguments ?? {};

	return { isRecursive, allowErrors, hasChildKeys, isLeaf, args };
}

function makeInstruction(cursor, queryKey) {
	let childCursor = cursor.child(queryKey);
	let handler = childCursor.schema ?? cursor.schema.$anyKey;

	return {
		... analyzeSubquery(childCursor.query),
		queryKey: queryKey,
		cursor: childCursor,
		handler: handler
	};
}

function assignErrorPath(error, cursor) {
	if (error.path == null) {
		// Only assign the path if it hasn't already happened at a deeper level; this is a recursive function after all
		error.path = cursor.queryPath;
		error.message = error.message + ` (${cursor.toPathString()})`;
	}
}

// TODO: build a sample todo-list schema for testing out fetches, mutations, and combinations of them, including on collections

function makeEnvironment(context, getContextForModule) {
	function callHandler(instruction) {
		// NOTE: cursor is assumed to already be the key of the child
		let { queryKey, handler, args, allowErrors, cursor } = instruction;

		if (handler != null) {
			return Promise.try(() => {
				// This calls the data provider in the schema
				if (handler.__moduleID != null) {
					// Defined in a module
					return Result.wrapAsync(() => maybeCall(handler.func, [ args, getContextForModule(handler.__moduleID) ], cursor.parent.schema));
				} else {
					// Defined in the root schema
					return Result.wrapAsync(() => maybeCall(handler, [ args, context ], cursor.parent.schema));
				}
			}).then((result) => {
				if (result.isOK) {
					return result.value();
				} else {
					let error = result.error();

					if (error.__dlayerAcceptableError === true) {
						if (allowErrors === true) {
							return Result.error(error.inner);
						} else {
							throw error.inner;
						}
					} else {
						throw error;
					}
				}
			}).tapCatch((error) => {
				// FIXME: Chain properly
				assignErrorPath(error, cursor);
			});
		} else {
			throw new Error(`No key '${queryKey}' exists in the schema`);
		}
	}

	// FIXME: instruction abstraction?

	function applyToResultValue(instruction, value, query) {
		let { cursor } = instruction;

		if (Array.isArray(value)) {
			return Promise.map(value, (item, i) => {
				let itemCursor = cursor
					.child(i)
					.override({
						query: query,
						schema: item
					});

				return applyRules(itemCursor);
			});
		} else {
			let itemCursor = cursor
				.override({
					query: query,
					schema: value
				});

			return applyRules(itemCursor);
		}
	}

	function applyRules(cursor) {
		// map query object -> result object
		return asyncMapObject(cursor.query, (queryKey, subquery) => {
			let shouldFetch = (subquery !== false);

			if (!shouldFetch || specialKeyRegex.test(queryKey)) {
				// When constructing the result object, we only care about the 'real' keys, not about special meta-keys like $key; those get processed in the actual resolution logic itself.
				return mapObject.mapObjectSkip;
			} else {
				// FIXME: This is hacky, and should be made more ergonomic...
				return [
					queryKey,
					Promise.try(async () => {
						let instruction = makeInstruction(cursor, queryKey);
						let value = await callHandler(instruction);

						let effectiveSubquery = (instruction.isRecursive)
							? { ... cursor.query, ... subquery }
							: subquery;

						let finalValue = (instruction.isLeaf || value == null)
							? value
							: applyToResultValue(instruction, value, effectiveSubquery);

						// FIXME: We're absorbing Result.errors here, but that's a bit weird. We should probably be consistently carrying Result values throughout the implementation, and only unwrap them at the last moment?
						return (instruction.allowErrors)
							? Result.ok(finalValue)
							: finalValue;
					})
				];
			}
		});
	}

	return applyRules;
}

module.exports = function createDLayer(options) {
	// options = { schema, modules, makeContext }

	let loaded = loadModules(options.modules ?? []);
	let schema = deepMerge(loaded.root, options.schema);

	return {
		query: function (query, context) {
			let generatedContext = (options.makeContext != null)
				? options.makeContext()
				: {};

			function getProperty(object, property, args = {}) {
				// TODO: Should this allow a single-argument, property-string-only variant for looking up properties on self?
				// FIXME: Validatem
				if (object == null) {
					throw new Error(`Empty object passed`);
				}

				if (property in object) {
					return maybeCall(object[property], [ args, combinedContext ], object);
				} else {
					// FIXME: Better error message with path
					throw new Error(`No key '${property}' exists in the schema`);
				}
			}

			function make(typeID, args, existenceRequired) {
				let type = loaded.types[typeID]?.func;

				if (type == null) {
					if (existenceRequired === true) {
						throw new Error(`No type named '${typeID}' exists`);
					} else {
						return;
					}
				} else {
					let instance = type(args);

					if (loaded.extensions[typeID] != null) {
						for (let [ key, extension ] of Object.entries(loaded.extensions[typeID])) {
							// TODO: Possibly make this more performant by making it possible to do an Object.assign? Or does that not matter for performance?
							instance[key] = extension.func;
						}
					}

					return instance;
				}
			}

			let combinedContext = {
				... generatedContext,
				... context,
				// FIXME: Figure out a way to annotate errors here with the path at which they occurred, *and* make clear that it was an internal property lookup
				$getProperty: getProperty,
				$getPropertyPath: function (object, propertyPath) {
					let parsedPath = (typeof propertyPath === "string")
						? propertyPath.split(".")
						: propertyPath;

					return Promise.reduce(parsedPath, (currentObject, pathSegment) => {
						if (currentObject != null) {
							return getProperty(currentObject, pathSegment);
						} else {
							// Effectively null-coalescing
							return null;
						}
					}, object);
				},
				$make: function (typeID, args) {
					return make(typeID, args, true);
				},
				$maybeMake: function (typeID, args) {
					return make(typeID, args, false);
				}
			};

			let cursor = createCursor({
				query: query,
				schema: schema
			});

			let evaluate = makeEnvironment(combinedContext, loaded.makeContextFactory(combinedContext));

			// FIXME: Currently, top-level errors do not get a path property assigned to them, because that assignment happens on nested calls above
			return evaluate(cursor);
		}
	};
};

module.exports.markAcceptableError = function (error) {
	return {
		__dlayerAcceptableError: true,
		inner: error
	};
};
@@ -0,0 +1,128 @@
"use strict";

// const mergeByTemplate = require("merge-by-template");
const syncpipe = require("syncpipe");
const deepMerge = require("./deep-merge");

/*
	Take a list of modules; each module specifies a name, schema root, types, type extensions, and a context factory function. Each module is internally assigned a unique ID. This unique ID is associated with each type factory and type extension method, and used as the key for a map of context factories; that way, upon invoking those methods, the module's own corresponding context can be injected. Only a single context should be created per module per request, so there should be a cache layer for the contexts (keyed by module ID), with each request creating a new cache.
*/

// NOTE: This can be global because we identify existing assignments by object identity, and that will never conflict
let numberedModules = new WeakMap();
let currentModuleNumber = 0;

function getModuleID(module) {
	if (!numberedModules.has(module)) {
		numberedModules.set(module, currentModuleNumber++);
	}

	return numberedModules.get(module);
}

function createTypeTracker() {
	let typeFactories = {};

	return {
		add: function (module, name, factory) {
			if (typeFactories[name] != null) {
				let existingEntry = typeFactories[name];
				throw new Error(`Type '${name}' already exists (from module '${module.name}', already defined by module '${existingEntry.source.name}')`);
			} else {
				typeFactories[name] = {
					source: module,
					// No context provided to type factory functions for now, since they are not allowed to be async for now anyway
					// FIXME: Maybe add a warning if the user returns a Promise from a factory, asking them to file a bug if they really need it?
					// func: wrapModuleFunction(module, factory)
					func: factory
				};
			}
		},
		get: function () {
			return typeFactories;
		}
	};
}

function createExtensionTracker() {
	let extendedTypes = {};

	return {
		add: function (module, type, name, method) {
			if (extendedTypes[type] == null) {
				extendedTypes[type] = {};
			}

			let extensions = extendedTypes[type];

			if (extensions[name] != null) {
				let existingEntry = extensions[name];
				throw new Error(`Type '${type}' already has a method extension named '${name}' (from module '${module.name}', already defined by module '${existingEntry.source.name}')`);
			} else {
				extensions[name] = {
					source: module,
					func: wrapModuleFunction(module, method)
				};
			}
		},
		get: function () {
			return extendedTypes;
		}
	};
}

function wrapModuleFunction(module, func) {
	return { __moduleID: getModuleID(module), func: func };
}

function defaultContext() {
	// Fallback function that generates an empty context, for when a module doesn't specify a makeContext handler
	return {};
}

module.exports = function (modules) {
	// TODO: Eventually replace hand-crafted merging logic with merge-by-template, once it can support this usecase properly(tm)
	// TODO: Fix merge-by-template so that reasonable error messages can be generated here, that are actually aware of eg. the conflicting key

	let types = createTypeTracker();
	let typeExtensions = createExtensionTracker();

	let contextFactories = syncpipe(modules, [
		_ => _.map((module) => [ getModuleID(module), module.makeContext ?? defaultContext ]),
		_ => new Map(_)
	]);

	let schemaRoots = modules.map((module) => module.root ?? {});

	for (let module of modules) {
		for (let [ type, factory ] of Object.entries(module.types ?? {})) {
			types.add(module, type, factory);
		}

		for (let [ type, extensions ] of Object.entries(module.extensions ?? {})) {
			for (let [ name, method ] of Object.entries(extensions)) {
				typeExtensions.add(module, type, name, method);
			}
		}
	}

	return {
		root: schemaRoots.reduce(deepMerge, {}),
		types: types.get(),
		extensions: typeExtensions.get(),
		makeContextFactory: function (baseContext) {
			let cache = new Map();

			return function makeContextForModule(moduleID) {
				if (!cache.has(moduleID)) {
					cache.set(moduleID, {
						... baseContext,
						... contextFactories.get(moduleID)()
					});
				}

				return cache.get(moduleID);
			};
		}
	};
};
@@ -0,0 +1,137 @@
"use strict";

const Promise = require("bluebird");
const dlayer = require("..");
const syncpipe = require("syncpipe");

let fakeDriveTree = {
	one: [ "/dev/1a", "/dev/1b" ],
	two: [ "/dev/2a" ]
};

let invertedTree = {
	"/dev/1a": "one",
	"/dev/1b": "one",
	"/dev/2a": "two"
};

let contextCounter = 1;

// FIXME: Disallow type name conflicts!

let moduleDrives = {
	name: "Drives",
	types: {
		"sysquery.core.Drive": function ({ name }) {
			return {
				name: name
			};
		}
	},
	extensions: {
		"sysquery.core.BlockDevice": {
			drive: async function (_, { counter, $getProperty, $make }) {
				console.log(`[context ${counter}] BlockDevice::drive`);

				return $make("sysquery.core.Drive", {
					name: invertedTree[await $getProperty(this, "path")]
				});
			}
		}
	},
	root: {
		hardware: {
			drives: function ({ names }, { counter, $make }) {
				console.log(`[context ${counter}] root::drives`);

				return syncpipe(fakeDriveTree, [
					_ => Object.entries(_),
					_ => (names != null)
						? _.filter(([ name, _devices ]) => names.includes(name))
						: _,
					_ => _.map(([ name, _devices ]) => $make("sysquery.core.Drive", { name }))
				]);
			}
		}
	},
	makeContext: () => {
		return {
			counter: contextCounter++
		};
	}
};

let moduleBlockDevices = {
	name: "Block Devices",
	types: {
		"sysquery.core.BlockDevice": function ({ path }) {
			return {
				path: path
			};
		}
	},
	extensions: {
		"sysquery.core.Drive": {
			blockDevices: async function (_, { counter, $getProperty, $make }) {
				console.log(`[context ${counter}] Drive::blockDevices`);

				return fakeDriveTree[await $getProperty(this, "name")].map((path) => {
					return $make("sysquery.core.BlockDevice", { path });
				});
			}
		}
	},
	root: {
		hardware: {
			blockDevices: function ({ paths }, { counter, $make }) {
				console.log(`[context ${counter}] root::blockDevices`);

				return syncpipe(fakeDriveTree, [
					_ => Object.values(_),
					_ => _.flat(),
					_ => (paths != null)
						? _.filter((path) => paths.includes(path))
						: _,
					_ => _.map((path) => $make("sysquery.core.BlockDevice", { path }))
				]);
			}
		}
	},
	makeContext: () => {
		return {
			counter: contextCounter++
		};
	}
};

let api = dlayer({
	schema: {
		foo: {
			bar: "baz"
		}
	},
	modules: [ moduleBlockDevices, moduleDrives ]
});

return Promise.try(() => {
	return api.query({
		hardware: {
			blockDevices: {
				$arguments: { paths: [ "/dev/1b" ] },
				path: true,
				drive: {
					name: true,
					blockDevices: {
						path: true
					}
				}
			}
		}
	});
}).then((result) => {
	console.log("-- result:");
	console.dir(result, { depth: null });
}).catch((error) => {
	console.log("Unhandled error:");
	console.dir(error);
});
@@ -0,0 +1,4 @@
TODO:
- $call, for calling non-idempotent functions, requiring a (potentially empty) list of arguments
- $repeat modifier, accepting an array of attributes to repeat the given attribute/function with; the results are an array in the same order, and the top-level properties are shared among all of them
- for named repeats, the user can use the alias feature instead? though there is no way to share properties in that case
@@ -0,0 +1,14 @@
{
	"name": "dlayer",
	"version": "0.1.0",
	"main": "index.js",
	"repository": "https://git.cryto.net/joepie91/dlayer.git",
	"author": "Sven Slootweg <admin@cryto.net>",
	"license": "WTFPL OR CC0-1.0",
	"dependencies": {
		"@joepie91/result": "^0.1.0",
		"bluebird": "^3.4.6",
		"map-obj": "^4.2.1",
		"syncpipe": "^1.0.0"
	}
}
@@ -0,0 +1,48 @@
"use strict";

const Promise = require("bluebird");
const dlayer = require("./");

// const loaders = require("../../api/data-sources");

let schema = {
	hardware: {
		drives: function () {
			return [{
				name: "foo",
				size: () => "4 GiB"
			}, {
				name: "bar",
				size: () => "2 TiB"
			}];
		},
		primaryNetworkInterface: function () {
			return {
				name: "baz",
				dataRate: () => "2.5 gbps"
			};
		}
	}
};

let api = dlayer({
	schema: schema
});

return Promise.try(() => {
	return api.query({
		hardware: {
			drives: {
				name: true,
				size: true
			},
			primaryNetworkInterface: {
				name: true
			}
		}
	});
}).then((result) => {
	console.dir(result, { depth: null });
}).catch((error) => {
	console.log("Unhandled error:");
	console.dir(error);
});
@@ -0,0 +1,30 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1


"@joepie91/result@^0.1.0":
  version "0.1.0"
  resolved "https://registry.yarnpkg.com/@joepie91/result/-/result-0.1.0.tgz#187b97033edf200698ad159ea0edc907fce2cad0"
  integrity sha512-2qjcinMrUV1FSA4g5AG6t32ijGTmcUzY5XIFJoNP0zQYtlM/C2NaLDcFHtwgASTMW0p3ZIkgueGlvwQe0S7Kxg==

assure-array@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/assure-array/-/assure-array-1.0.0.tgz#4f4ad16a87659d6200a4fb7103462033d216ec1f"
  integrity sha512-igvOvGYidAcJKr6YQIHzLivUpAdqUfi7MN0QfrEnFtifQvuw6D0W4oInrIVgTaefJ+QBVWAj8ZYuUGNnwq6Ydw==

bluebird@^3.4.6:
  version "3.7.2"
  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
  integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==

map-obj@^4.2.1:
  version "4.3.0"
  resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-4.3.0.tgz#9304f906e93faae70880da102a9f1df0ea8bb05a"
  integrity sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==

syncpipe@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/syncpipe/-/syncpipe-1.0.0.tgz#170340f813150bc8fcb8878b1b9c71ea0ccd3727"
  integrity sha512-cdiAFTnFJRvUaNPDc2n9CqoFvtIL3+JUMJZrC3kA3FzpugHOqu0TvkgNwmnxPZ5/WjAzMcfMS3xm+AO7rg/j/w==
  dependencies:
    assure-array "^1.0.0"