WIP
parent
5be1872be3
commit
b9fc50c0d2
@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"extends": "@joepie91/eslint-config/react",
|
||||||
|
"parserOptions": {
|
||||||
|
"ecmaVersion": 2020,
|
||||||
|
"sourceType": "script"
|
||||||
|
},
|
||||||
|
"parser": "babel-eslint",
|
||||||
|
"plugins": [
|
||||||
|
"babel",
|
||||||
|
"import"
|
||||||
|
],
|
||||||
|
"rules": {
|
||||||
|
"import/no-extraneous-dependencies": 2,
|
||||||
|
"import/no-unresolved": [2, { "commonjs": true }]
|
||||||
|
}
|
||||||
|
}
|
@ -1,78 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
"env": {
|
|
||||||
"browser": true,
|
|
||||||
"commonjs": true,
|
|
||||||
"es6": true,
|
|
||||||
"node": true
|
|
||||||
},
|
|
||||||
"parserOptions": {
|
|
||||||
"ecmaFeatures": {
|
|
||||||
"experimentalObjectRestSpread": true,
|
|
||||||
"jsx": true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"plugins": [
|
|
||||||
"react"
|
|
||||||
],
|
|
||||||
"rules": {
|
|
||||||
/* Things that should effectively be syntax errors. */
|
|
||||||
"indent": [ "error", "tab", {
|
|
||||||
SwitchCase: 1
|
|
||||||
}],
|
|
||||||
"linebreak-style": [ "error", "unix" ],
|
|
||||||
"semi": [ "error", "always" ],
|
|
||||||
/* Things that are always mistakes. */
|
|
||||||
"getter-return": [ "error" ],
|
|
||||||
"no-compare-neg-zero": [ "error" ],
|
|
||||||
"no-dupe-args": [ "error" ],
|
|
||||||
"no-dupe-keys": [ "error" ],
|
|
||||||
"no-duplicate-case": [ "error" ],
|
|
||||||
"no-empty": [ "error" ],
|
|
||||||
"no-empty-character-class": [ "error" ],
|
|
||||||
"no-ex-assign": [ "error" ],
|
|
||||||
"no-extra-semi": [ "error" ],
|
|
||||||
"no-func-assign": [ "error" ],
|
|
||||||
"no-invalid-regexp": [ "error" ],
|
|
||||||
"no-irregular-whitespace": [ "error" ],
|
|
||||||
"no-obj-calls": [ "error" ],
|
|
||||||
"no-sparse-arrays": [ "error" ],
|
|
||||||
"no-undef": [ "error" ],
|
|
||||||
"no-unreachable": [ "error" ],
|
|
||||||
"no-unsafe-finally": [ "error" ],
|
|
||||||
"use-isnan": [ "error" ],
|
|
||||||
"valid-typeof": [ "error" ],
|
|
||||||
"curly": [ "error" ],
|
|
||||||
"no-caller": [ "error" ],
|
|
||||||
"no-fallthrough": [ "error" ],
|
|
||||||
"no-extra-bind": [ "error" ],
|
|
||||||
"no-extra-label": [ "error" ],
|
|
||||||
"array-callback-return": [ "error" ],
|
|
||||||
"prefer-promise-reject-errors": [ "error" ],
|
|
||||||
"no-with": [ "error" ],
|
|
||||||
"no-useless-concat": [ "error" ],
|
|
||||||
"no-unused-labels": [ "error" ],
|
|
||||||
"no-unused-expressions": [ "error" ],
|
|
||||||
"no-unused-vars": [ "error" , { argsIgnorePattern: "^_" } ],
|
|
||||||
"no-return-assign": [ "error" ],
|
|
||||||
"no-self-assign": [ "error" ],
|
|
||||||
"no-new-wrappers": [ "error" ],
|
|
||||||
"no-redeclare": [ "error" ],
|
|
||||||
"no-loop-func": [ "error" ],
|
|
||||||
"no-implicit-globals": [ "error" ],
|
|
||||||
"strict": [ "error", "global" ],
|
|
||||||
/* Make JSX not cause 'unused variable' errors. */
|
|
||||||
"react/jsx-uses-react": ["error"],
|
|
||||||
"react/jsx-uses-vars": ["error"],
|
|
||||||
/* Development code that should be removed before deployment. */
|
|
||||||
"no-console": [ "warn" ],
|
|
||||||
"no-constant-condition": [ "warn" ],
|
|
||||||
"no-debugger": [ "warn" ],
|
|
||||||
"no-alert": [ "warn" ],
|
|
||||||
"no-warning-comments": ["warn", {
|
|
||||||
terms: ["fixme"]
|
|
||||||
}],
|
|
||||||
/* Common mistakes that can *occasionally* be intentional. */
|
|
||||||
"no-template-curly-in-string": ["warn"],
|
|
||||||
"no-unsafe-negation": [ "warn" ],
|
|
||||||
}
|
|
||||||
};
|
|
@ -0,0 +1,31 @@
|
|||||||
|
{
|
||||||
|
// Use IntelliSense to learn about possible attributes.
|
||||||
|
// Hover to view descriptions of existing attributes.
|
||||||
|
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"type": "node",
|
||||||
|
"request": "attach",
|
||||||
|
"name": "Attach to Process",
|
||||||
|
"address": "localhost",
|
||||||
|
"port": 9229,
|
||||||
|
"localRoot": "${workspaceFolder}",
|
||||||
|
"remoteRoot": "${workspaceFolder}",
|
||||||
|
"restart": true,
|
||||||
|
"skipFiles": [
|
||||||
|
"<node_internals>/**",
|
||||||
|
"node_modules/**"
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "node",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Launch Program",
|
||||||
|
"program": "${workspaceFolder}/bin/server.js",
|
||||||
|
"skipFiles": [
|
||||||
|
"<node_internals>/**"
|
||||||
|
],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
@ -0,0 +1 @@
|
|||||||
|
{ "presets": ["@babel/preset-env"] }
|
@ -1,17 +1,18 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
|
const postgresqlSocketUrl = require("postgresql-socket-url");
|
||||||
const config = require("./config.json");
|
const config = require("./config.json");
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
client: "pg",
|
client: "pg",
|
||||||
connection: {
|
connection: {
|
||||||
database: "cvm",
|
connectionString: postgresqlSocketUrl({
|
||||||
charset: "utf8",
|
socketPath: "/tmp",
|
||||||
username: config.database.username,
|
database: config.database
|
||||||
password: config.database.password
|
})
|
||||||
},
|
},
|
||||||
pool: {
|
pool: {
|
||||||
min: 2,
|
min: 2,
|
||||||
max: 10
|
max: 10
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
@ -0,0 +1,140 @@
|
|||||||
|
MARKER:
|
||||||
|
- Replace local `unreachable` with @joepie91/unreachable
|
||||||
|
- Update all Validatem usage to new validateArguments API
|
||||||
|
- LVM / mdraid support and tabs (+ complete refactoring LVM implementation)
|
||||||
|
- Switch hashing to argon2id
|
||||||
|
- Switch child_process to execa
|
||||||
|
|
||||||
|
IDEAS:
|
||||||
|
- contextual sidebar on add/edit form pages that shows/highlights all the relevant data for deciding what to fill into the form
|
||||||
|
- eg. all storage devices and pools when creating a new volume
|
||||||
|
- or highlighting the currently-editing volume in an edit screen
|
||||||
|
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
API architecture
|
||||||
|
|
||||||
|
- Level 0: (src/wrappers) Data source implementations
|
||||||
|
- Eg. output-parsing wrappers using `execBinary`, but implementations might also be provided by a third-party module entirely
|
||||||
|
- The APIs for these are specific to the implementation
|
||||||
|
- Level 1: (src/api/data-sources) Data source connectors
|
||||||
|
- These provide a standardized interface over the data source implementations, exposing each individual semantically distinct operation as a function
|
||||||
|
- That function takes either of:
|
||||||
|
- An array of identifiers of 'items' to obtain information about
|
||||||
|
- The `All` symbol to obtain all items
|
||||||
|
- Level 2: (src/graphql/data-object) The 'data object' abstraction
|
||||||
|
- Takes in a definition of a GraphQL object's structure, and which properties should be obtained from what data source connectors
|
||||||
|
- Definition structured as (dataSource => (field => dataGetter))
|
||||||
|
- The `dataSource` may either be:
|
||||||
|
- The name of the data source connector to obtain the source data from
|
||||||
|
- The special `LocalProperties` symbol, which specifies:
|
||||||
|
- Data that is immediately known upon object instantiation, and doesn't require accessing a data source
|
||||||
|
- Eg. the identifier that the object was initialized with
|
||||||
|
- Functions that produce data objects of other types, the instantiation of which doesn't require accessing a data source
|
||||||
|
- Eg. because it is initialized with the same identifier
|
||||||
|
- The `field` may either be:
|
||||||
|
- A string name, in which case it defines how to resolve that specific property on the data object
|
||||||
|
- The special `ID` symbol, in which case it defines by which identifier to request the 'thing' from the data source connector.
|
||||||
|
- Usually this will be the identifier that the data object is initialized with.
|
||||||
|
- The `dataGetter` is either:
|
||||||
|
- A function, mapping from the source data to a value, called with (sourceData, queryArgs, context)
|
||||||
|
- sourceData: The result object originating from the data source lookup
|
||||||
|
- queryArgs: The arguments passed to the property access in the GraphQL query
|
||||||
|
- context: The full GraphQL context + 'properties' key if DependsOn is used
|
||||||
|
- A string, specifying the property to extract from the source data, equivalent to `(sourceData) => sourceData[property]`
|
||||||
|
- NOTE: The dataSources are not specified directly in the data object definition! They're provided via GraphQL context separately.
|
||||||
|
- Level 3: (src/api/types)
|
||||||
|
- The actual data object definitions
|
||||||
|
- Parametric modules, take the full set of types as their argument
|
||||||
|
- Specified as a *function that instantiates and returns* a newly created data object, when initialized with some sort of identifier value
|
||||||
|
- Eg. the 'path' for a block device, or the 'ID' for a user
|
||||||
|
- The instantiation function is free to choose the arguments it accepts for initialization (and how to use them), but a destructured object is recommended
|
||||||
|
|
||||||
|
------------
|
||||||
|
|
||||||
|
Dynamic data lookup
|
||||||
|
|
||||||
|
Sometimes there are special cases where we can't (reliably) obtain particular data from the same source, eg. the correct data source connector to invoke may be dependent on some other data in the object. Need to figure out an API that allows representing this ergonomically.
|
||||||
|
|
||||||
|
Maybe have an async "resolve these data sources" API that can be used from within a custom handler? This would sidestep the issue where particularly complex cases are hard or impossible to represent in a declarative format, by just making it custom logic entirely.
|
||||||
|
|
||||||
|
Maybe something similar for resolving properties defined elsewhere on the object? Otherwise any custom handler in the [Dynamic] block would invoke the handlers for *all* of these dependencies (which are specified on a block level), even when they are not needed for that particular handler.
|
||||||
|
|
||||||
|
-------------
|
||||||
|
|
||||||
|
execBinary redesign
|
||||||
|
|
||||||
|
- requireOnStdout
|
||||||
|
- expectOnStdout
|
||||||
|
- failOnStdout
|
||||||
|
|
||||||
|
- requireOnStderr
|
||||||
|
- expectOnStderr
|
||||||
|
- failOnStderr
|
||||||
|
|
||||||
|
Types of handling:
|
||||||
|
- requireOn*: a result must be produced by the parsing adapter
|
||||||
|
- expectOn*: a result *may* be produced by the parsing adapter
|
||||||
|
- failOn*: if a result is produced by the parsing adapter, that constitutes an error
|
||||||
|
|
||||||
|
Adapter:
|
||||||
|
A { create: Function, supportsStreams: Boolean } object that, upon initialization/calling `create`, returns a function that takes the string or stream of output, and returns a result or throws an error/NoResult. Example adapters:
|
||||||
|
- matchLiteral: match a literal string
|
||||||
|
- booleanResult: switches from "return undefined or throw NoResult" to "return true or false"
|
||||||
|
- matchRegex: match a regular expression and extract data
|
||||||
|
- matchPeg: run a PEG parser and use its output
|
||||||
|
- matchMultiple: run multiple adapters and combine the results into a single (keyed) object
|
||||||
|
|
||||||
|
matchMultiple example:
|
||||||
|
matchMultiple({
|
||||||
|
deviceName: matchRegex(/name: (.+)/, ([ name ]) => name),
|
||||||
|
isNVMe: matchLiteral("protocol: NVMe", { booleanResult: true })
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
- Different kinds of output handling:
|
||||||
|
- expect*: require that the handler produce a result
|
||||||
|
- if result: OK
|
||||||
|
- if no result: fail
|
||||||
|
- if parsing error: fail
|
||||||
|
- handle*: optionally produce a result
|
||||||
|
- if result: OK
|
||||||
|
- if no result: OK
|
||||||
|
- if parsing error: fail
|
||||||
|
- fail*: when output is detected, produce an error
|
||||||
|
- if result: fail
|
||||||
|
- if no result: OK
|
||||||
|
- if parsing error: fail
|
||||||
|
|
||||||
|
- expectStderr (convert stderr to success result) vs. detectStderr (convert stderr to thrown error)
|
||||||
|
- expectStdout
|
||||||
|
- expectEmptyOutput
|
||||||
|
|
||||||
|
Create various utility methods for parsing stdout/stderr, that can be used separately within the expect* and detect* methods
|
||||||
|
|
||||||
|
Some sort of matchAll([ .. ]) utility for returning the results of multiple handlers/extractors? Maybe follow the 'messages' model that PostCSS follows?
|
||||||
|
Interceptor model? That can also produce messages, and modify the flags and such of the invocation
|
||||||
|
|
||||||
|
TODO: Publish error-chain! Separating out the error chaining itself, from the display
|
||||||
|
Adapt from other cause-retaining error types
|
||||||
|
full-chain instanceof?
|
||||||
|
|
||||||
|
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Glossary
|
||||||
|
|
||||||
|
Bind mount
|
||||||
|
"Mounts" a folder on one (mounted) filesystem, as a separate mount/filesystem, essentially mirroring it under another location
|
||||||
|
Loopback device
|
||||||
|
Virtual block device that can be mounted, and is backed by a *file* on another (mounted) filesystem.
|
||||||
|
|
||||||
|
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Utilities
|
||||||
|
|
||||||
|
fuser
|
||||||
|
Show which processes use the named files, sockets, or filesystems.
|
@ -0,0 +1,52 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const Promise = require("bluebird");
|
||||||
|
const memoizee = require("memoizee");
|
||||||
|
const fs = Promise.promisifyAll(require("fs"));
|
||||||
|
const treecutter = require("../../packages/treecutter");
|
||||||
|
const findmnt = require("../../packages/exec-findmnt");
|
||||||
|
const shallowMerge = require("../../packages/shallow-merge");
|
||||||
|
const All = require("../../packages/graphql-interface/symbols/all");
|
||||||
|
|
||||||
|
module.exports = function () {
|
||||||
|
let findmntOnce = memoizee(() => {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return findmnt();
|
||||||
|
}).then((mounts) => {
|
||||||
|
return treecutter.flatten(mounts);
|
||||||
|
}).map((mount) => {
|
||||||
|
if (mount.sourceDevice?.startsWith("/")) {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return fs.realpathAsync(mount.sourceDevice);
|
||||||
|
}).then((actualSourcePath) => {
|
||||||
|
return shallowMerge(mount, {
|
||||||
|
sourceDevice: actualSourcePath
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
return mount;
|
||||||
|
}
|
||||||
|
}).then((list) => {
|
||||||
|
let tree = treecutter.rebuild(list);
|
||||||
|
|
||||||
|
return {
|
||||||
|
tree: tree,
|
||||||
|
list: list
|
||||||
|
};
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
return function (mountpoints) {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return findmntOnce();
|
||||||
|
}).then(({tree, list}) => {
|
||||||
|
return mountpoints.map((mountpoint) => {
|
||||||
|
if (mountpoint === All) {
|
||||||
|
return tree;
|
||||||
|
} else {
|
||||||
|
return list.find((mount) => mount.mountpoint === mountpoint);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,13 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const Promise = require("bluebird");
|
||||||
|
|
||||||
|
const nvmeCli = require("../../../packages/exec-nvme-cli");
|
||||||
|
|
||||||
|
module.exports = function () {
|
||||||
|
return function (controllerPaths) {
|
||||||
|
return Promise.map(controllerPaths, (path) => {
|
||||||
|
return nvmeCli.listNamespaces({ devicePath: path });
|
||||||
|
});
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,87 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const Promise = require("bluebird");
|
||||||
|
const fs = Promise.promisifyAll(require("fs"));
|
||||||
|
|
||||||
|
const {createDataObject, LocalProperties, ID, Dynamic} = require("../../packages/graphql-interface/data-object");
|
||||||
|
|
||||||
|
module.exports = function (types) {
|
||||||
|
return function Mount({ mountpoint }) {
|
||||||
|
return createDataObject({
|
||||||
|
[LocalProperties]: {
|
||||||
|
mountpoint: mountpoint
|
||||||
|
},
|
||||||
|
[Dynamic]: {
|
||||||
|
sourceDevice: (_, { resolveDataSource }) => {
|
||||||
|
// FIXME: This code is rather bulky, maybe there should be a first-class way to express "try to create a data object that may fail"
|
||||||
|
return Promise.try(() => {
|
||||||
|
return resolveDataSource("findmnt", mountpoint);
|
||||||
|
}).then((mount) => {
|
||||||
|
if (mount.sourceDevice != null) {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return fs.realpathAsync(mount.sourceDevice);
|
||||||
|
}).then((sourcePath) => {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return resolveDataSource("lsblk", { path: sourcePath });
|
||||||
|
}).then((lsblkResult) => {
|
||||||
|
if (lsblkResult != null) {
|
||||||
|
return types.BlockDevice({ path: sourcePath });
|
||||||
|
} else {
|
||||||
|
// This occurs when the `sourceDevice` is a valid device, but it is not a *block* device, eg. like with `/dev/fuse`
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
findmnt: {
|
||||||
|
[ID]: mountpoint,
|
||||||
|
id: "id",
|
||||||
|
// FIXME: Aren't we inferring the below somewhere else in the code, using the square brackets?
|
||||||
|
type: (mount) => {
|
||||||
|
if (mount.rootPath === "/") {
|
||||||
|
return "ROOT_MOUNT";
|
||||||
|
} else {
|
||||||
|
return "SUBMOUNT";
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// sourceDevice: (mount) => {
|
||||||
|
// return Promise.try(() => {
|
||||||
|
// if (mount.sourceDevice != null) {
|
||||||
|
// return Promise.try(() => {
|
||||||
|
// return fs.realpathAsync(mount.sourceDevice);
|
||||||
|
// }).then((sourcePath) => {
|
||||||
|
// return types.BlockDevice({ path: sourcePath });
|
||||||
|
// });
|
||||||
|
// } else {
|
||||||
|
// return null;
|
||||||
|
// }
|
||||||
|
// });
|
||||||
|
// },
|
||||||
|
filesystem: "filesystem",
|
||||||
|
options: "options",
|
||||||
|
label: "label",
|
||||||
|
uuid: "uuid",
|
||||||
|
partitionLabel: "partitionLabel",
|
||||||
|
partitionUUID: "partitionUUID",
|
||||||
|
deviceNumber: "deviceNumber",
|
||||||
|
totalSpace: "totalSpace",
|
||||||
|
freeSpace: "freeSpace",
|
||||||
|
usedSpace: "usedSpace",
|
||||||
|
rootPath: "rootPath",
|
||||||
|
taskID: "taskID",
|
||||||
|
optionalFields: "optionalFields",
|
||||||
|
propagationFlags: "propagationFlags",
|
||||||
|
children: (mount) => {
|
||||||
|
return mount.children.map((child) => {
|
||||||
|
return Mount({ mountpoint: child.mountpoint });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,6 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
module.exports = function concat(characters) {
|
||||||
|
// NOTE: This function doesn't really *do* much, it mostly exists to have a more conceptually useful name for this operation (since `.join("")` is non-obvious as to its purpose). This operation is often needed when writing PEG.js parsers, since those will parse byte-by-byte, and so any repeating modifier will result in an *array of characters* when what you usually want is a string. This makes it a string.
|
||||||
|
return characters.join("");
|
||||||
|
};
|
@ -1,8 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
const matchOrError = require("./match-or-error");
|
|
||||||
|
|
||||||
module.exports = function deviceNameFromPath(path) {
|
|
||||||
let [name] = matchOrError(/^\/dev\/(.+)$/, path);
|
|
||||||
return name;
|
|
||||||
};
|
|
@ -1,375 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
require("array.prototype.flat").shim();
|
|
||||||
|
|
||||||
const Promise = require("bluebird");
|
|
||||||
const util = require("util");
|
|
||||||
const execFileAsync = util.promisify(require("child_process").execFile);
|
|
||||||
const execAll = require("execall");
|
|
||||||
const debug = require("debug")("cvm:execBinary");
|
|
||||||
|
|
||||||
const errors = require("./errors");
|
|
||||||
|
|
||||||
let None = Symbol("None");
|
|
||||||
|
|
||||||
/* FIXME: How to handle partial result parsing when an error is encountered in the parsing code? */
|
|
||||||
/* FIXME: "terminal" flag for individual matches in exec-binary */
|
|
||||||
/* FIXME: Test that flag-dash prevention in arguments works */
|
|
||||||
|
|
||||||
function keyToFlagName(key) {
|
|
||||||
if (key.startsWith("!")) {
|
|
||||||
return key.slice(1);
|
|
||||||
} else if (key.length === 1) {
|
|
||||||
return `-${key}`;
|
|
||||||
} else {
|
|
||||||
return `--${key}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function flagValueToArgs(key, value) {
|
|
||||||
if (value === true) {
|
|
||||||
return [key];
|
|
||||||
} else if (Array.isArray(value)) {
|
|
||||||
return value.map((item) => {
|
|
||||||
return flagValueToArgs(key, item);
|
|
||||||
}).flat();
|
|
||||||
} else {
|
|
||||||
return [key, value];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function flagsToArgs(flags) {
|
|
||||||
return Object.keys(flags).map((key) => {
|
|
||||||
let value = flags[key];
|
|
||||||
let flagName = keyToFlagName(key);
|
|
||||||
|
|
||||||
return flagValueToArgs(flagName, value);
|
|
||||||
}).flat();
|
|
||||||
}
|
|
||||||
|
|
||||||
function regexExpectationsForChannel(object, channel) {
|
|
||||||
return object._settings.expectations.filter((expectation) => {
|
|
||||||
return expectation.channel === channel && expectation.type === "regex";
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function executeExpectation(expectation, stdout, stderr) {
|
|
||||||
let output = (expectation.channel === "stdout") ? stdout : stderr;
|
|
||||||
|
|
||||||
if (expectation.type === "regex") {
|
|
||||||
if (expectation.regex.test(output)) {
|
|
||||||
return executeRegexExpectation(expectation, output);
|
|
||||||
} else {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
} else if (expectation.type === "json") {
|
|
||||||
let parsedOutput = JSON.parse(output);
|
|
||||||
|
|
||||||
if (expectation.callback != null) {
|
|
||||||
return expectation.callback(parsedOutput);
|
|
||||||
} else {
|
|
||||||
return parsedOutput;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
throw new Error(`Unexpected expectation type: ${expectation.type}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function executeRegexExpectation(expectation, input) {
|
|
||||||
function processResult(fullMatch, groups) {
|
|
||||||
if (expectation.callback != null) {
|
|
||||||
return expectation.callback(groups, fullMatch, input);
|
|
||||||
} else {
|
|
||||||
return groups;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (expectation.matchAll) {
|
|
||||||
let matches = execAll(expectation.regex, input);
|
|
||||||
|
|
||||||
if (matches.length > 0) { /* FILEBUG: File issue on execall repo to document the no-match output */
|
|
||||||
let results = matches.map((match) => {
|
|
||||||
return processResult(match.match, match.sub);
|
|
||||||
}).filter((result) => {
|
|
||||||
return (result !== None);
|
|
||||||
});
|
|
||||||
|
|
||||||
if (results.length > 0) {
|
|
||||||
return results;
|
|
||||||
} else {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let match = expectation.regex.exec(input);
|
|
||||||
|
|
||||||
if (match != null) {
|
|
||||||
return processResult(match[0], match.slice(1));
|
|
||||||
} else {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function verifyRegex(regex, {matchAll}) {
|
|
||||||
if (matchAll === true && !regex.flags.includes("g")) {
|
|
||||||
throw new Error("You enabled the 'matchAll' option, but the specified regular expression is not a global one; you probably forgot to specify the 'g' flag");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateArguments(args) {
|
|
||||||
if (args.some((arg) => arg == null)) {
|
|
||||||
throw new Error("One or more arguments were undefined or null; this is probably a mistake in how you're calling the command");
|
|
||||||
} else if (args.some((arg) => arg[0] === "-")) {
|
|
||||||
throw new Error("For security reasons, command arguments cannot start with a dash; use the 'withFlags' method if you want to specify flags");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = function createBinaryInvocation(command, args = []) {
|
|
||||||
/* FIXME: The below disallows dashes in the args, but not in the command. Is that what we want? */
|
|
||||||
validateArguments(args);
|
|
||||||
|
|
||||||
return {
|
|
||||||
_settings: {
|
|
||||||
asRoot: false,
|
|
||||||
singleResult: false,
|
|
||||||
atLeastOneResult: false,
|
|
||||||
jsonStdout: false,
|
|
||||||
jsonStderr: false,
|
|
||||||
expectations: [],
|
|
||||||
flags: {},
|
|
||||||
environment: {}
|
|
||||||
},
|
|
||||||
_withSettings: function (newSettings) {
|
|
||||||
let newObject = Object.assign({}, this, {
|
|
||||||
_settings: Object.assign({}, this._settings, newSettings)
|
|
||||||
});
|
|
||||||
|
|
||||||
/* FIXME: Make this ignore json expectations */
|
|
||||||
let hasStdoutExpectations = (regexExpectationsForChannel(newObject, "stdout").length > 0);
|
|
||||||
let hasStderrExpectations = (regexExpectationsForChannel(newObject, "stderr").length > 0);
|
|
||||||
|
|
||||||
if (newObject._settings.jsonStdout && hasStdoutExpectations) {
|
|
||||||
throw new Error("The 'expectJsonStdout' and 'expectStdout' options cannot be combined");
|
|
||||||
} else if (newObject._settings.jsonStderr && hasStderrExpectations) {
|
|
||||||
throw new Error("The 'expectJsonStderr' and 'expectStderr' options cannot be combined");
|
|
||||||
} else {
|
|
||||||
return newObject;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
asRoot: function () {
|
|
||||||
return this._withSettings({ asRoot: true });
|
|
||||||
},
|
|
||||||
singleResult: function () {
|
|
||||||
return this._withSettings({ singleResult: true });
|
|
||||||
},
|
|
||||||
atLeastOneResult: function () {
|
|
||||||
return this._withSettings({ atLeastOneResult: true });
|
|
||||||
},
|
|
||||||
/* NOTE: Subsequent withFlags calls involving the same flag key will *override* the earlier value, not add to it! */
|
|
||||||
withFlags: function (flags) {
|
|
||||||
if (flags != null) {
|
|
||||||
return this._withSettings({
|
|
||||||
flags: Object.assign({}, this._settings.flags, flags)
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
withEnvironment: function (environment) {
|
|
||||||
if (environment != null) {
|
|
||||||
return this._withSettings({
|
|
||||||
environment: Object.assign({}, this._settings.environment, environment)
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
withModifier: function (modifierFunction) {
|
|
||||||
if (modifierFunction != null) {
|
|
||||||
return modifierFunction(this);
|
|
||||||
} else {
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
expectJsonStdout: function (callback) {
|
|
||||||
if (!this._settings.jsonStdout) {
|
|
||||||
return this._withSettings({
|
|
||||||
jsonStdout: true,
|
|
||||||
expectations: this._settings.expectations.concat([{
|
|
||||||
type: "json",
|
|
||||||
channel: "stdout",
|
|
||||||
key: "stdout",
|
|
||||||
callback: callback
|
|
||||||
}])
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
expectJsonStderr: function (callback) {
|
|
||||||
if (!this._settings.jsonStderr) {
|
|
||||||
return this._withSettings({
|
|
||||||
jsonStderr: true,
|
|
||||||
expectations: this._settings.expectations.concat([{
|
|
||||||
type: "json",
|
|
||||||
channel: "stderr",
|
|
||||||
key: "stderr",
|
|
||||||
callback: callback
|
|
||||||
}])
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
expectStdout: function (key, regex, {required, result, matchAll} = {}) {
|
|
||||||
verifyRegex(regex, {matchAll});
|
|
||||||
|
|
||||||
return this._withSettings({
|
|
||||||
expectations: this._settings.expectations.concat([{
|
|
||||||
type: "regex",
|
|
||||||
channel: "stdout",
|
|
||||||
required: (required === true),
|
|
||||||
key: key,
|
|
||||||
regex: regex,
|
|
||||||
callback: result,
|
|
||||||
matchAll: matchAll
|
|
||||||
}])
|
|
||||||
});
|
|
||||||
},
|
|
||||||
expectStderr: function (key, regex, {required, result, matchAll} = {}) {
|
|
||||||
verifyRegex(regex, {matchAll});
|
|
||||||
|
|
||||||
return this._withSettings({
|
|
||||||
expectations: this._settings.expectations.concat([{
|
|
||||||
type: "regex",
|
|
||||||
channel: "stderr",
|
|
||||||
required: (required === true),
|
|
||||||
key: key,
|
|
||||||
regex: regex,
|
|
||||||
callback: result,
|
|
||||||
matchAll: matchAll
|
|
||||||
}])
|
|
||||||
});
|
|
||||||
},
|
|
||||||
then: function () {
|
|
||||||
throw new Error("Attempted to use a command builder as a Promise; you probably forgot to call .execute");
|
|
||||||
},
|
|
||||||
execute: function () {
|
|
||||||
return Promise.try(() => {
|
|
||||||
let effectiveCommand = command;
|
|
||||||
let effectiveArgs = flagsToArgs(this._settings.flags).concat(args);
|
|
||||||
|
|
||||||
if (this._settings.asRoot) {
|
|
||||||
effectiveCommand = "sudo";
|
|
||||||
effectiveArgs = [command].concat(effectiveArgs);
|
|
||||||
}
|
|
||||||
|
|
||||||
let effectiveCompleteCommand = [effectiveCommand].concat(effectiveArgs);
|
|
||||||
|
|
||||||
return Promise.try(() => {
|
|
||||||
debug(`Running: ${effectiveCommand} ${effectiveArgs.map((arg) => `"${arg}"`).join(" ")}`);
|
|
||||||
|
|
||||||
return execFileAsync(effectiveCommand, effectiveArgs, {
|
|
||||||
env: Object.assign({}, process.env, this._settings.environment)
|
|
||||||
});
|
|
||||||
}).then(({stdout, stderr}) => {
|
|
||||||
return { stdout, stderr, exitCode: 0 };
|
|
||||||
}).catch((error) => {
|
|
||||||
let {stdout, stderr} = error;
|
|
||||||
|
|
||||||
let exitCode = (typeof error.code === "number") ? error.code : null;
|
|
||||||
|
|
||||||
return { stdout, stderr, error, exitCode };
|
|
||||||
}).then(({stdout, stderr, error, exitCode}) => {
|
|
||||||
let finalResult, resultFound;
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (this._settings.singleResult) {
|
|
||||||
let result = None;
|
|
||||||
let i = 0;
|
|
||||||
|
|
||||||
while (result === None && i < this._settings.expectations.length) {
|
|
||||||
let expectation = this._settings.expectations[i];
|
|
||||||
|
|
||||||
result = executeExpectation(expectation, stdout, stderr);
|
|
||||||
|
|
||||||
if (expectation.required === true && result === None) {
|
|
||||||
throw new errors.ExpectedOutputMissing(`Expected output not found for key '${expectation.key}'`, {
|
|
||||||
exitCode: exitCode,
|
|
||||||
stdout: stdout,
|
|
||||||
stderr: stderr
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
i += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
finalResult = result;
|
|
||||||
resultFound = (finalResult !== None);
|
|
||||||
} else {
|
|
||||||
let results = this._settings.expectations.map((expectation) => {
|
|
||||||
let result = executeExpectation(expectation, stdout, stderr);
|
|
||||||
|
|
||||||
if (result === None) {
|
|
||||||
if (expectation.required === true) {
|
|
||||||
throw new errors.ExpectedOutputMissing(`Expected output not found for key '${expectation.key}'`, {
|
|
||||||
exitCode: exitCode,
|
|
||||||
stdout: stdout,
|
|
||||||
stderr: stderr
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return { key: expectation.key, value: result };
|
|
||||||
}
|
|
||||||
}).filter((result) => {
|
|
||||||
return (result !== None);
|
|
||||||
});
|
|
||||||
|
|
||||||
resultFound = (results.length > 0);
|
|
||||||
|
|
||||||
finalResult = results.reduce((object, {key, value}) => {
|
|
||||||
return Object.assign(object, {
|
|
||||||
[key]: value
|
|
||||||
});
|
|
||||||
}, {});
|
|
||||||
}
|
|
||||||
} catch (processingError) {
|
|
||||||
throw errors.UnexpectedOutput.chain(processingError, "An error occurred while processing command output", {
|
|
||||||
command: effectiveCompleteCommand,
|
|
||||||
exitCode: exitCode,
|
|
||||||
stdout: stdout,
|
|
||||||
stderr: stderr
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (resultFound || this._settings.atLeastOneResult === false) {
|
|
||||||
if (error != null) {
|
|
||||||
throw new errors.NonZeroExitCode.chain(error, `Process '${command}' exited with code ${exitCode}`, {
|
|
||||||
exitCode: exitCode,
|
|
||||||
stdout: stdout,
|
|
||||||
stderr: stderr,
|
|
||||||
result: finalResult
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
return {
|
|
||||||
exitCode: exitCode,
|
|
||||||
stdout: stdout,
|
|
||||||
stderr: stderr,
|
|
||||||
result: finalResult
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
throw new errors.ExpectedOutputMissing("None of the expected outputs for the command were encountered, but at least one result is required", {
|
|
||||||
exitCode: exitCode,
|
|
||||||
stdout: stdout,
|
|
||||||
stderr: stderr
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}).catch(errors.CommandExecutionFailed.rethrowChained(`An error occurred while executing '${command}'`, {
|
|
||||||
command: effectiveCompleteCommand
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
};
|
|
@ -1,62 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
const Promise = require("bluebird");
|
|
||||||
|
|
||||||
function withProperty(dataSource, id, property) {
|
|
||||||
return withData(dataSource, id, (value) => {
|
|
||||||
return value[property];
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function withData(dataSource, id, callback) {
|
|
||||||
return function (args, context) {
|
|
||||||
let {data} = context;
|
|
||||||
|
|
||||||
return Promise.try(() => {
|
|
||||||
if (data[dataSource] != null) {
|
|
||||||
return data[dataSource].load(id);
|
|
||||||
} else {
|
|
||||||
throw new Error(`Specified data source '${dataSource}' does not exist`);
|
|
||||||
}
|
|
||||||
}).then((value) => {
|
|
||||||
if (value != null) {
|
|
||||||
return callback(value, args, context);
|
|
||||||
} else {
|
|
||||||
throw new Error(`Got a null value from data source '${dataSource}' for ID '${id}'`);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
let ID = Symbol("ID");
|
|
||||||
let LocalProperties = Symbol("localProperties");
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
ID: ID,
|
|
||||||
LocalProperties: LocalProperties,
|
|
||||||
createDataObject: function createDataObject(mappings) {
|
|
||||||
let object = {};
|
|
||||||
|
|
||||||
if (mappings[LocalProperties] != null) {
|
|
||||||
Object.assign(object, mappings[LocalProperties]);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let [dataSource, items] of Object.entries(mappings)) {
|
|
||||||
if (items[ID] != null) {
|
|
||||||
let id = items[ID];
|
|
||||||
|
|
||||||
for (let [property, source] of Object.entries(items)) {
|
|
||||||
if (typeof source === "string") {
|
|
||||||
object[property] = withProperty(dataSource, id, source);
|
|
||||||
} else if (typeof source === "function") {
|
|
||||||
object[property] = withData(dataSource, id, source);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
throw new Error(`No object ID was provided for the '${dataSource}' data source`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return object;
|
|
||||||
}
|
|
||||||
};
|
|
@ -1,11 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
const graphql = require("graphql");
|
|
||||||
|
|
||||||
module.exports = function createGraphQLInterface(schema, options, root) {
|
|
||||||
return function makeQuery(query, args) {
|
|
||||||
return graphql.graphql(schema, query, root, {
|
|
||||||
data: (options.loaderFactory != null) ? options.loaderFactory() : {}
|
|
||||||
}, args);
|
|
||||||
}
|
|
||||||
};
|
|
@ -1,19 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
module.exports = function linearizeTree(rootList, childrenProperty = "children") {
|
|
||||||
let linearizedItems = [];
|
|
||||||
|
|
||||||
function add(list) {
|
|
||||||
for (let item of list) {
|
|
||||||
linearizedItems.push(item);
|
|
||||||
|
|
||||||
if (item[childrenProperty] != null) {
|
|
||||||
add(item[childrenProperty]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
add(rootList);
|
|
||||||
|
|
||||||
return linearizedItems;
|
|
||||||
};
|
|
@ -1,11 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
module.exports = function mapValue(value, mapping) {
|
|
||||||
if (value == null) {
|
|
||||||
return value;
|
|
||||||
} else if (mapping[value] != null) {
|
|
||||||
return mapping[value];
|
|
||||||
} else {
|
|
||||||
throw new Error(`Unrecognized value: ${value}`);
|
|
||||||
}
|
|
||||||
};
|
|
@ -1,17 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
module.exports = function matchOrError(regex, string) {
|
|
||||||
if (regex == null) {
|
|
||||||
throw new Error("No regular expression was provided");
|
|
||||||
} else if (string == null) {
|
|
||||||
throw new Error("No string to match on was provided");
|
|
||||||
} else {
|
|
||||||
let match = regex.exec(string);
|
|
||||||
|
|
||||||
if (match == null) {
|
|
||||||
throw new Error(`Regular expression ${regex.toString()} failed to match on string: ${string}`);
|
|
||||||
} else {
|
|
||||||
return match.slice(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
@ -0,0 +1,11 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const errorChain = require("error-chain");
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
UnexpectedOutput: errorChain.create("UnexpectedOutput"),
|
||||||
|
ExpectedOutputMissing: errorChain.create("ExpectedOutputMissing"),
|
||||||
|
OutputParsingFailed: errorChain.create("OutputParsingFailed"),
|
||||||
|
NonZeroExitCode: errorChain.create("NonZeroExitCode"),
|
||||||
|
CommandExecutionFailed: errorChain.create("CommandExecutionFailed"),
|
||||||
|
};
|
@ -0,0 +1,285 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
require("array.prototype.flat").shim();
|
||||||
|
|
||||||
|
const Promise = require("bluebird");
|
||||||
|
const util = require("util");
|
||||||
|
const execFileAsync = util.promisify(require("child_process").execFile);
|
||||||
|
const debug = require("debug")("cvm:execBinary");
|
||||||
|
const asExpression = require("as-expression");
|
||||||
|
const { rethrowAs } = require("error-chain");
|
||||||
|
const textParser = require("../text-parser");
|
||||||
|
|
||||||
|
const errors = require("./errors");
|
||||||
|
|
||||||
|
/* FIXME: How to handle partial result parsing when an error is encountered in the parsing adapter? */
|
||||||
|
/* FIXME: Test that flag-dash prevention in arguments works */
|
||||||
|
|
||||||
|
function keyToFlagName(key) {
|
||||||
|
if (key.startsWith("!")) {
|
||||||
|
return key.slice(1);
|
||||||
|
} else if (key.length === 1) {
|
||||||
|
return `-${key}`;
|
||||||
|
} else {
|
||||||
|
return `--${key}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function flagValueToArgs(key, value) {
|
||||||
|
if (value === true) {
|
||||||
|
return [key];
|
||||||
|
} else if (Array.isArray(value)) {
|
||||||
|
return value.map((item) => {
|
||||||
|
return flagValueToArgs(key, item);
|
||||||
|
}).flat();
|
||||||
|
} else {
|
||||||
|
return [key, value];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function flagsToArgs(flags) {
|
||||||
|
return Object.keys(flags).map((key) => {
|
||||||
|
let value = flags[key];
|
||||||
|
let flagName = keyToFlagName(key);
|
||||||
|
|
||||||
|
return flagValueToArgs(flagName, value);
|
||||||
|
}).flat();
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateArguments(args) {
|
||||||
|
if (args.some((arg) => arg == null)) {
|
||||||
|
throw new Error("One or more arguments were undefined or null; this is probably a mistake in how you're calling the command");
|
||||||
|
} else if (args.some((arg) => arg[0] === "-")) {
|
||||||
|
throw new Error("For security reasons, command arguments cannot start with a dash; use the 'withFlags' method if you want to specify flags");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: Immutable-builder abstraction
|
||||||
|
// FIXME: validatem
|
||||||
|
module.exports = function createBinaryInvocation(command, args = []) {
|
||||||
|
/* FIXME: The below disallows dashes in the args, but not in the command. Is that what we want? */
|
||||||
|
validateArguments(args);
|
||||||
|
|
||||||
|
return {
|
||||||
|
_settings: {
|
||||||
|
asRoot: false,
|
||||||
|
expectations: [],
|
||||||
|
flags: {},
|
||||||
|
environment: {},
|
||||||
|
expectedExitCodes: [0],
|
||||||
|
resultMerger: function (results) {
|
||||||
|
return results.reduce((merged, result) => Object.assign(merged, result), {});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_withSettings: function (newSettings) {
|
||||||
|
let newObject = Object.assign({}, this, {
|
||||||
|
_settings: Object.assign({}, this._settings, newSettings)
|
||||||
|
});
|
||||||
|
|
||||||
|
return newObject;
|
||||||
|
},
|
||||||
|
_withExpectation: function (expectation) {
|
||||||
|
return this._withSettings({
|
||||||
|
expectations: this._settings.expectations.concat([ expectation ])
|
||||||
|
});
|
||||||
|
},
|
||||||
|
asRoot: function () {
|
||||||
|
return this._withSettings({ asRoot: true });
|
||||||
|
},
|
||||||
|
withFlags: function (flags) {
|
||||||
|
if (flags != null) {
|
||||||
|
return this._withSettings({
|
||||||
|
flags: Object.assign({}, this._settings.flags, flags)
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
withEnvironment: function (environment) {
|
||||||
|
if (environment != null) {
|
||||||
|
return this._withSettings({
|
||||||
|
environment: Object.assign({}, this._settings.environment, environment)
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
withModifier: function (modifierFunction) {
|
||||||
|
if (modifierFunction != null) {
|
||||||
|
return modifierFunction(this);
|
||||||
|
} else {
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
expectOnStdout: function (adapter) {
|
||||||
|
return this._withExpectation({
|
||||||
|
channel: "stdout",
|
||||||
|
adapter: adapter
|
||||||
|
});
|
||||||
|
},
|
||||||
|
requireOnStdout: function (adapter) {
|
||||||
|
return this._withExpectation({
|
||||||
|
channel: "stdout",
|
||||||
|
adapter: adapter,
|
||||||
|
required: true
|
||||||
|
});
|
||||||
|
},
|
||||||
|
failOnStdout: function (adapter) {
|
||||||
|
return this._withExpectation({
|
||||||
|
channel: "stdout",
|
||||||
|
adapter: adapter,
|
||||||
|
disallowed: true
|
||||||
|
});
|
||||||
|
},
|
||||||
|
expectOnStderr: function (adapter) {
|
||||||
|
return this._withExpectation({
|
||||||
|
channel: "stderr",
|
||||||
|
adapter: adapter
|
||||||
|
});
|
||||||
|
},
|
||||||
|
requireOnStderr: function (adapter) {
|
||||||
|
return this._withExpectation({
|
||||||
|
channel: "stderr",
|
||||||
|
adapter: adapter,
|
||||||
|
required: true
|
||||||
|
});
|
||||||
|
},
|
||||||
|
failOnStderr: function (adapter) {
|
||||||
|
return this._withExpectation({
|
||||||
|
channel: "stderr",
|
||||||
|
adapter: adapter,
|
||||||
|
disallowed: true
|
||||||
|
});
|
||||||
|
},
|
||||||
|
failOnAnyStderr: function () {
|
||||||
|
return this._withExpectation({
|
||||||
|
channel: "stderr",
|
||||||
|
adapter: null,
|
||||||
|
disallowed: true
|
||||||
|
});
|
||||||
|
},
|
||||||
|
then: function () {
|
||||||
|
throw new Error("Attempted to use a command builder as a Promise; you probably forgot to call .execute");
|
||||||
|
},
|
||||||
|
execute: function () {
|
||||||
|
return Promise.try(() => {
|
||||||
|
let effectiveCommand = command;
|
||||||
|
let effectiveArgs = flagsToArgs(this._settings.flags).concat(args);
|
||||||
|
|
||||||
|
if (this._settings.asRoot) {
|
||||||
|
effectiveCommand = "sudo";
|
||||||
|
effectiveArgs = [command].concat(effectiveArgs);
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: Shouldn't we represent this in its original form, or at least an escaped form? And suffix 'Unsafe' to ensure it's not used in any actual execution code.
|
||||||
|
let effectiveCompleteCommand = [effectiveCommand].concat(effectiveArgs);
|
||||||
|
|
||||||
|
return Promise.try(() => {
|
||||||
|
debug(`Running: ${effectiveCommand} ${effectiveArgs.map((arg) => `"${arg}"`).join(" ")}`);
|
||||||
|
|
||||||
|
return execFileAsync(effectiveCommand, effectiveArgs, {
|
||||||
|
env: Object.assign({}, process.env, this._settings.environment)
|
||||||
|
});
|
||||||
|
}).then(({stdout, stderr}) => {
|
||||||
|
return { stdout, stderr, exitCode: 0 };
|
||||||
|
}).catch((error) => {
|
||||||
|
let {stdout, stderr} = error;
|
||||||
|
|
||||||
|
let exitCode = (typeof error.code === "number") ? error.code : null;
|
||||||
|
|
||||||
|
return { stdout, stderr, error, exitCode };
|
||||||
|
}).then(({stdout, stderr, error, exitCode}) => {
|
||||||
|
try {
|
||||||
|
let channels = { stdout, stderr };
|
||||||
|
|
||||||
|
if (!this._settings.expectedExitCodes.includes(exitCode)) {
|
||||||
|
// FIXME: Can we actually pass `error` to be chained onto here, when there's a case where `error` is undefined? Namely, when requiring a non-zero exit code, but the process exits with 0.
|
||||||
|
throw new errors.NonZeroExitCode.chain(error, `Expected exit code to be one of ${JSON.stringify(this._settings.expectedExitCodes)}, but got '${exitCode}'`, {
|
||||||
|
exitCode: exitCode,
|
||||||
|
stdout: stdout,
|
||||||
|
stderr: stderr
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
let expectationResults = this._settings.expectations
|
||||||
|
.map((expectation) => {
|
||||||
|
if (expectation.adapter == null) {
|
||||||
|
if (channels[expectation.channel] != null) {
|
||||||
|
if (channels[expectation.channel].length > 0) {
|
||||||
|
throw new errors.UnexpectedOutput(`Encountered output on '${expectation.channel}', but no output was supposed to be produced there`, {
|
||||||
|
failedChannel: expectation.channel
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error(`Encountered expectation for unexpected channel '${expectation.channel}'; this is a bug, please report it`, {
|
||||||
|
failedChannel: expectation.channel
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let result = asExpression(() => {
|
||||||
|
try {
|
||||||
|
return expectation.adapter.parse(channels[expectation.channel].toString());
|
||||||
|
} catch (error) {
|
||||||
|
// TODO: What if both `required` *and* `disallowed`? Can that ever occur, conceptually speaking?
|
||||||
|
if (error instanceof textParser.NoResult) {
|
||||||
|
// FIXME: Annotate to make error source clearer?
|
||||||
|
if (expectation.required === true) {
|
||||||
|
throw error;
|
||||||
|
} else {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw errors.OutputParsingFailed.chain(error, `An error occurred while parsing '${expectation.channel}'`, {
|
||||||
|
failedChannel: expectation.channel
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result !== undefined && (typeof result !== "object" || Array.isArray(result))) {
|
||||||
|
throw new Error(`Output adapters may only return a plain object from their parse method (or nothing at all)`);
|
||||||
|
} else if (result !== undefined && expectation.disallowed === true) {
|
||||||
|
// TODO: How to make this error more informative?
|
||||||
|
throw new errors.UnexpectedOutput(`Encountered output on '${expectation.channel}' that isn't supposed to be there`, {
|
||||||
|
failedChannel: expectation.channel
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.filter((result) => {
|
||||||
|
return (result != null);
|
||||||
|
});
|
||||||
|
|
||||||
|
let mergedResults = (expectationResults.length > 0)
|
||||||
|
? this._settings.resultMerger(expectationResults)
|
||||||
|
: expectationResults[0];
|
||||||
|
|
||||||
|
return {
|
||||||
|
exitCode: exitCode,
|
||||||
|
stdout: stdout,
|
||||||
|
stderr: stderr,
|
||||||
|
result: mergedResults
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// FIXME: Use getAllContext
|
||||||
|
let message = (error.failedChannel != null)
|
||||||
|
? `Failed while processing ${error.failedChannel} of command`
|
||||||
|
: "Failed while processing result of command execution";
|
||||||
|
|
||||||
|
throw errors.CommandExecutionFailed.chain(error, message, {
|
||||||
|
exitCode: exitCode,
|
||||||
|
stdout: stdout,
|
||||||
|
stderr: stderr
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}).catch(rethrowAs(errors.CommandExecutionFailed, `An error occurred while executing '${command}'`, {
|
||||||
|
command: effectiveCompleteCommand
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
@ -0,0 +1,67 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const Promise = require("bluebird");
|
||||||
|
const matchValue = require("match-value");
|
||||||
|
const execBinary = require("../exec-binary");
|
||||||
|
const parseIECBytes = require("../parse-bytes-iec");
|
||||||
|
const createJSONParser = require("../text-parser-json");
|
||||||
|
|
||||||
|
function parseBoolean(value) {
|
||||||
|
if (typeof value === "boolean") {
|
||||||
|
// Newer versions of `lsblk` correctly use boolean values
|
||||||
|
return value;
|
||||||
|
} else {
|
||||||
|
return matchValue(value, {
|
||||||
|
0: false,
|
||||||
|
1: true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function mapType(value) {
|
||||||
|
return matchValue(value, {
|
||||||
|
part: "partition",
|
||||||
|
disk: "disk",
|
||||||
|
loop: "loopDevice",
|
||||||
|
rom: "disk",
|
||||||
|
lvm: "partition"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function mapSubType(value) {
|
||||||
|
return matchValue(value, {
|
||||||
|
part: null,
|
||||||
|
disk: null,
|
||||||
|
loop: null,
|
||||||
|
rom: "readOnlyMedia",
|
||||||
|
lvm: "lvm"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function mapDeviceList(devices) {
|
||||||
|
return devices.map((device) => {
|
||||||
|
return {
|
||||||
|
name: device.name,
|
||||||
|
path: device.path,
|
||||||
|
type: mapType(device.type),
|
||||||
|
subType: mapSubType(device.type),
|
||||||
|
mountpoint: device.mountpoint,
|
||||||
|
deviceNumber: device["maj:min"],
|
||||||
|
removable: parseBoolean(device.rm),
|
||||||
|
readOnly: parseBoolean(device.ro),
|
||||||
|
size: parseIECBytes(device.size),
|
||||||
|
children: (device.children != null) ? mapDeviceList(device.children) : []
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = function lsblk() {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return execBinary("lsblk")
|
||||||
|
.withFlags({ json: true, "output-all": true })
|
||||||
|
.requireOnStdout(createJSONParser())
|
||||||
|
.execute();
|
||||||
|
}).then((output) => {
|
||||||
|
return mapDeviceList(output.result.blockdevices);
|
||||||
|
});
|
||||||
|
};
|
@ -0,0 +1,12 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const errorChain = require("error-chain");
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
InvalidPath: errorChain.create("InvalidPath"),
|
||||||
|
InvalidName: errorChain.create("InvalidName"),
|
||||||
|
PartitionExists: errorChain.create("PartitionExists"),
|
||||||
|
VolumeGroupExists: errorChain.create("VolumeGroupExists"),
|
||||||
|
InvalidVolumeGroup: errorChain.create("InvalidVolumeGroup"),
|
||||||
|
PhysicalVolumeInUse: errorChain.create("PhysicalVolumeInUse"),
|
||||||
|
};
|
@ -0,0 +1,42 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const Promise = require("bluebird");
|
||||||
|
const execAll = require("execall");
|
||||||
|
const execBinary = require("../exec-binary");
|
||||||
|
|
||||||
|
function createNamespaceParser() {
|
||||||
|
return {
|
||||||
|
supportsStreams: false,
|
||||||
|
parse: function (input) {
|
||||||
|
return {
|
||||||
|
namespaces: execAll(/^\[\s*[0-9]+\]:(?:(0)|0x([0-9A-F]+))$/gm, input)
|
||||||
|
.map((match) => {
|
||||||
|
let [ idLiteral, idHex ] = match.sub;
|
||||||
|
|
||||||
|
if (idLiteral != null) {
|
||||||
|
/* NOTE: This is a special case for when the value is exactly 0 - and maybe there are others too, hence still doing a parseInt, so we can easily change the regex later if needed:
|
||||||
|
https://stackoverflow.com/questions/11922876/what-does-a-hash-sign-do-in-printf#comment15879638_11922887
|
||||||
|
https://github.com/linux-nvme/nvme-cli/blob/f9ebefe27b0596006d76d58f3219a9fc12e88664/nvme.c#L979
|
||||||
|
*/
|
||||||
|
return parseInt(idLiteral);
|
||||||
|
} else {
|
||||||
|
return parseInt(idHex, 16);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
listNamespaces: function ({ devicePath }) {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return execBinary("nvme", [ "list-ns", devicePath ])
|
||||||
|
.asRoot()
|
||||||
|
.expectOnStdout(createNamespaceParser())
|
||||||
|
.execute();
|
||||||
|
}).then((output) => {
|
||||||
|
return output.result.namespaces;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
@ -0,0 +1,57 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
const Promise = require("bluebird");
|
||||||
|
const path = require("path");
|
||||||
|
const execBinary = require("../exec-binary");
|
||||||
|
const createPegParser = require("../text-parser-pegjs");
|
||||||
|
const itemsToObject = require("../items-to-object");
|
||||||
|
|
||||||
|
/* FIXME: Error handling, eg. device not found errors */
|
||||||
|
|
||||||
|
function outputParser(rootRule) {
|
||||||
|
return createPegParser({
|
||||||
|
grammarFile: path.join(__dirname, "./parser.pegjs"),
|
||||||
|
options: {
|
||||||
|
allowedStartRules: [ rootRule ]
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
attributes: function ({ devicePath }) {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return execBinary("smartctl", [devicePath])
|
||||||
|
.asRoot()
|
||||||
|
.withFlags({ attributes: true })
|
||||||
|
.requireOnStdout(outputParser("RootAttributes"))
|
||||||
|
.execute();
|
||||||
|
}).then((output) => {
|
||||||
|
// NOTE: Ignore the header, for now
|
||||||
|
return output.result.attributes;
|
||||||
|
});
|
||||||
|
},
|
||||||
|
info: function ({ devicePath }) {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return execBinary("smartctl", [devicePath])
|
||||||
|
.asRoot()
|
||||||
|
.withFlags({ info: true })
|
||||||
|
.requireOnStdout(outputParser("RootInfo"))
|
||||||
|
.execute();
|
||||||
|
}).then((output) => {
|
||||||
|
// NOTE: Ignore the header, for now
|
||||||
|
return itemsToObject(output.result.fields);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
scan: function () {
|
||||||
|
return Promise.try(() => {
|
||||||
|
return execBinary("smartctl")
|
||||||
|
.asRoot()
|
||||||
|
.withFlags({ scan: true })
|
||||||
|
.requireOnStdout(outputParser("RootScan"))
|
||||||
|
.execute();
|
||||||
|
}).then((output) => {
|
||||||
|
// NOTE: Ignore the header, for now
|
||||||
|
return output.result.devices;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
@ -0,0 +1,16 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
module.exports = function mapAttributeFlags(flagAsNumber) {
|
||||||
|
if (flagAsNumber & 128 || flagAsNumber & 64) {
|
||||||
|
throw new Error(`Encountered unknown flag byte in flag ${flagAsNumber.toString(16).padStart(4, "0")}`);
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
autoKeep: Boolean(flagAsNumber & 32),
|
||||||
|
eventCount: Boolean(flagAsNumber & 16),
|
||||||
|
errorRate: Boolean(flagAsNumber & 8),
|
||||||
|
affectsPerformance: Boolean(flagAsNumber & 4),
|
||||||
|
updatedOnline: Boolean(flagAsNumber & 2),
|
||||||
|
indicatesFailure: Boolean(flagAsNumber & 1),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
};
|
@@ -0,0 +1,249 @@
{
	const matchValue = require("match-value");
	const syncpipe = require("syncpipe");

	const {B} = require("../unit-bytes-iec");
	const mapAttributeFlags = require("./map-attribute-flags");
}

RootInfo
	= header:Header infoSection:InfoSection Newline* {
		return { ...header, fields: infoSection }
	};

RootScan
	= devices:ScanDevice* {
		return { devices: devices };
	}

RootAttributes
	= header:Header attributesSection:AttributesSection Newline* {
		return { ...header, attributes: attributesSection }
	};

_
	= (" " / "\t")*

RestOfLine
	= content:$[^\n]+ Newline {
		return content;
	}

Newline
	= "\n"
	/ "\r\n"

Header 'header'
	= "smartctl " versionString:RestOfLine "Copyright" copyrightStatement:RestOfLine Newline {
		return { versionString, copyrightStatement };
	}

BytesValue
	= value:SeparatedNumberValue {
		return B(value);
	}

NumberValue
	= value:$[0-9]+ {
		return parseInt(value);
	}

SeparatedNumberValue
	= value:$[0-9,]+ {
		return syncpipe(value, [
			(_) => _.replace(/,/g, ""),
			(_) => parseInt(_)
		]);
	}

HexNumberValue
	= value:$[0-9A-Fa-f]+ {
		return parseInt(value, 16);
	}

IdentifierValue
	= value:$[a-zA-Z_-]+ {
		return value;
	}

// smartctl --scan

ScanDevice 'scanned device'
	= path:$[^ ]+ _ "-d" _ interface_:$[^ ]+ _ RestOfLine {
		return { path: path, interface: interface_ };
	}

// smartctl --info

InfoSection 'information section'
	= "=== START OF INFORMATION SECTION ===" Newline fields:(InfoField+) {
		return fields.filter((field) => field != null);
	}

InfoField 'information field'
	= InfoFieldSimple
	/ InfoFieldIgnored
	/ InfoFieldSize
	/ InfoFieldRPM
	/ InfoFieldSectorSizes
	/ InfoFieldBoolean
	/ InfoFieldUnknown

InfoFieldSimpleKey
	= "Device Model" { return "model"; }
	/ "Model Number" { return "model"; }
	/ "Model Family" { return "modelFamily"; }
	/ "Serial Number" { return "serialNumber"; }
	/ "LU WWN Device Id" { return "wwn"; }
	/ "Firmware Version" { return "firmwareVersion"; }
	/ "Form Factor" { return "formFactor"; }
	/ "ATA Version is" { return "ataVersion"; }
	/ "SATA Version is" { return "sataVersion"; }

InfoFieldSimple
	= key:InfoFieldSimpleKey ":" _ value:RestOfLine {
		return { key: key, value: value };
	}

InfoFieldUnknown
	= key:$[^:]+ ":" _ RestOfLine {
		console.warn(`Encountered unrecognized SMART info key: ${key}`);
		return null;
	}

InfoFieldIgnoredKey
	= "Device is"
	/ "Local Time is"

InfoFieldIgnored
	= key:InfoFieldIgnoredKey ":" _ RestOfLine {
		return null;
	}
	/ "SMART support is:" _ ("Available" / "Unavailable") RestOfLine {
		// We don't actually care about this entry, but have to specify its possible values explicitly, to distinguish it from the entry we *do* care about that (annoyingly) uses the same key; see InfoFieldBoolean
		return null;
	}

InfoFieldSize
	// NOTE: We don't actually care about the human-friendly display size after the 'bytes' specifier, hence the RestOfLine
	= InfoFieldSizeKey _ value:SeparatedNumberValue _ "bytes"? _ RestOfLine {
		return {
			key: "size",
			value: B(value)
		};
	}

InfoFieldSizeKey
	= "User Capacity:"
	/ "Total NVM Capacity:"

InfoFieldRPM
	= "Rotation Rate:" _ value:NumberValue _ "rpm" Newline {
		return {
			key: "rpm",
			value: value
		};
	}

InfoFieldSectorSizes
	= "Sector Sizes:" _ logicalSize:BytesValue _ "bytes logical," _ physicalSize:BytesValue _ "bytes physical" Newline {
		return {
			key: "sectorSizes",
			value: {
				logical: logicalSize,
				physical: physicalSize
			}
		};
	}

InfoFieldBooleanKey
	= "SMART support is" { return "smartEnabled"; }

InfoFieldBoolean
	= key:InfoFieldBooleanKey ":" _ value:RestOfLine {
		return {
			key: key,
			value: matchValue(value, {
				Enabled: true,
				Disabled: false
			})
		};
	}

// smartctl --attributes

AttributesSection
	= AttributesSectionSATA
	/ AttributesSectionNVMe

AttributesSectionSATA
	= "=== START OF READ SMART DATA SECTION ===" Newline
	"SMART Attributes Data Structure revision number:" _ NumberValue Newline
	"Vendor Specific SMART Attributes with Thresholds:" Newline
	"ID#" _ "ATTRIBUTE_NAME" _ "FLAG" _ "VALUE" _ "WORST" _ "THRESH" _ "TYPE" _ "UPDATED" _ "WHEN_FAILED" _ "RAW_VALUE" Newline
	attributes:AttributeFieldSATA+ {
		return attributes;
	}

AttributesSectionNVMe
	= "=== START OF SMART DATA SECTION ===" Newline
	"SMART/Health Information (NVMe Log 0x02)" Newline
	attributes:AttributeFieldNVMe+ {
		return attributes;
	}

AttributeFlags
	= "0x" number:HexNumberValue {
		return mapAttributeFlags(number);
	}

AttributeUpdatedWhen
	= "Always"
	/ "Offline"

AttributeFailedWhen
	= "FAILING_NOW"
	/ "In_the_past"
	/ "-"

AttributeFieldType
	= "Pre-fail"
	/ "Old_age"

AttributeFieldSATA
	= _ id:NumberValue
	_ attributeName:IdentifierValue
	_ flags:AttributeFlags
	_ value:NumberValue
	_ worstValue:NumberValue
	_ threshold:NumberValue
	_ type:AttributeFieldType
	_ updatedWhen:AttributeUpdatedWhen
	_ failedWhen:AttributeFailedWhen
	_ rawValue:RestOfLine {
		return {
			id,
			attributeName,
			flags,
			value,
			worstValue,
			threshold,
			rawValue,
			updatedWhen: matchValue(updatedWhen, {
				"Always": "always",
				"Offline": "offline"
			}),
			type: matchValue(type, {
				"Pre-fail": "preFail",
				"Old_age": "oldAge"
			}),
			failingNow: (failedWhen === "FAILING_NOW"),
			/* TODO: Should the below include the FAILING_NOW state? */
			failedBefore: (failedWhen === "In_the_past")
		};
	}

AttributeFieldNVMe
	= label:$[^:]+ ":" _ value:RestOfLine {
		return { label: label, value };
	}
@@ -0,0 +1,22 @@
"use strict";

const findInTree = require("./");

let tree = [{
	name: "a",
	children: [
		{ name: "a1" },
		{ name: "a2",
			children: [
				{ name: "a2a" },
				{ name: "a2b" },
				{ name: "a2c" }
			]
		}
	]
}, {
	name: "b"
}];

console.log(findInTree({ tree, predicate: (item) => item.name === "a2" }));
console.log(findInTree({ tree, predicate: (item) => item.name === "nonexistent" }));
@@ -0,0 +1,35 @@
"use strict";

const { validateOptions, required, isFunction, isString } = require("validatem");
const assureArray = require("assure-array");
const isIterable = require("is-iterable");

module.exports = function findInTree(options) {
	validateOptions(arguments, {
		tree: [ required ],
		predicate: [ required, isFunction ],
		childrenProperty: [ isString ],
	});

	let childrenProperty = options.childrenProperty ?? "children";
	let topLevelItems = assureArray(options.tree);
	let predicate = options.predicate;

	function find(items) {
		if (isIterable(items)) {
			for (let item of items) {
				if (predicate(item)) {
					return item;
				} else {
					let childResult = find(item[childrenProperty]);

					if (childResult !== undefined) {
						return childResult;
					}
				}
			}
		}
	}

	return find(topLevelItems);
};
@@ -0,0 +1,126 @@
"use strict";

const Promise = require("bluebird");
const objectFromEntries = require("object.fromentries");
const util = require("util");

function resolveFromDataSource(dataContext, dataSource, id) {
	if (dataContext[dataSource] != null) {
		return dataContext[dataSource].load(id);
	} else {
		throw new Error(`Specified data source '${dataSource}' does not exist`);
	}
}

function withProperty(dataSource, id, property) {
	return withData(dataSource, id, (value) => {
		return value[property];
	});
}

function withData(dataSource, id, callback) {
	return function (args, context) {
		let { data } = context;

		return Promise.try(() => {
			return resolveFromDataSource(data, dataSource, id);
		}).then((value) => {
			if (value != null) {
				// FIXME: Inject 'properties'
				return callback(value, args, context);
			} else {
				// QUESTION: Why do we disallow this again?
				throw new Error(`Got a null-ish value from data source '${dataSource}' for ID '${util.inspect(id)}'`);
			}
		});
	};
}

function withDynamicHandler(handler, object) {
	return function (args, context) {
		let { data } = context;

		function resolveProperty(property, fromObject = object) {
			if (typeof fromObject[property] !== "function") {
				throw new Error(`FIXME: Properties can apparently be non-functions`);
			}

			return fromObject[property](args, context);
		}

		let extendedContext = {
			... context,
			resolveProperty: resolveProperty,
			resolveProperties: function (properties, fromObject) {
				return Promise.map(properties, (property) => {
					return Promise.try(() => {
						return resolveProperty(property, fromObject);
					}).then((value) => {
						return [ property, value ];
					});
				}).then((entries) => {
					return objectFromEntries(entries);
				});
			},
			resolvePropertyPath: function (propertyPath, fromObject) {
				let initialObject = fromObject ?? object;

				return Promise.reduce(propertyPath, (last, property) => {
					if (last != null) {
						return resolveProperty(property, last);
					}
				}, initialObject);
			},
			resolveDataSource: function (dataSource, id) {
				return resolveFromDataSource(data, dataSource, id);
			}
		};

		return handler(args, extendedContext);
	};
}

let ID = Symbol("ID");
let LocalProperties = Symbol("LocalProperties");
let Dynamic = Symbol("Dynamic");

module.exports = {
	ID: ID,
	Dynamic: Dynamic,
	LocalProperties: LocalProperties,
	createDataObject: function createDataObject(mappings) {
		let object = {};

		if (mappings[LocalProperties] != null) {
			Object.assign(object, mappings[LocalProperties]);
		}

		if (mappings[Dynamic] != null) {
			for (let [property, handler] of Object.entries(mappings[Dynamic])) {
				object[property] = withDynamicHandler(handler, object);
			}
		}

		for (let [dataSource, items] of Object.entries(mappings)) {
			if (items[ID] != null) {
				let id = items[ID];

				for (let [property, source] of Object.entries(items)) {
					if (object[property] == null) {
						if (typeof source === "string") {
							object[property] = withProperty(dataSource, id, source);
						} else if (typeof source === "function") {
							object[property] = withData(dataSource, id, source);
						} /* FIXME: else */
					} else {
						throw new Error(`Handler already defined for property '${property}' - maybe you specified it twice for different data sources?`);
					}
				}
			} else {
				throw new Error(`No object ID was provided for the '${dataSource}' data source`);
			}
		}

		return object;
	}
};
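A rough usage sketch of createDataObject, purely illustrative: the `smartctlInfo` data source name, the device path, and the property names here are assumptions, not part of this commit.

const { createDataObject, ID, LocalProperties, Dynamic } = require("./data-object"); // path assumed

let drive = createDataObject({
	[LocalProperties]: {
		path: "/dev/sda"
	},
	[Dynamic]: {
		// A handler receives (args, extendedContext) and can resolve other properties on this object
		summary: (_args, { resolveProperties }) => resolveProperties([ "model", "size" ])
	},
	smartctlInfo: {
		[ID]: "/dev/sda",          // passed to context.data.smartctlInfo.load(...)
		model: "model",            // string: copy that property from the loaded value
		size: (info) => info.size  // function: derive a value from the loaded value
	}
});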
@@ -0,0 +1,17 @@
"use strict";

const graphql = require("graphql");

module.exports = function createGraphQLInterface(schema, options, root) {
	return function makeQuery(query, args) {
		return graphql.graphql({
			schema: schema,
			source: query,
			rootValue: root,
			contextValue: {
				data: (options.loaderFactory != null) ? options.loaderFactory() : {}
			},
			variableValues: args
		});
	};
};
@@ -0,0 +1,9 @@
"use strict";

const objectFromEntries = require("object.fromentries");

module.exports = function itemsToObject(items) {
	// Maps Array<{key, value}> to an Object<key, value>
	let entries = items.map(({ key, value }) => [ key, value ]);
	return objectFromEntries(entries);
};
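A one-line illustration of the mapping (the field names and values are arbitrary):

itemsToObject([ { key: "model", value: "ExampleDisk" }, { key: "rpm", value: 7200 } ]);
// => { model: "ExampleDisk", rpm: 7200 }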
@@ -0,0 +1,15 @@
"use strict";

// FIXME: Finish this later

const { validateArguments, required, isString, isFunction } = require("validatem");

module.exports = function mapTree(tree, predicate, childrenProperty) {
	validateArguments(arguments, [
		[ "tree", required ],
		[ "predicate", required, isFunction ],
		[ "childrenProperty", isString ]
	]);


};
@@ -0,0 +1,25 @@
"use strict";

const { validateArguments, required } = require("@validatem/core");
const isString = require("@validatem/is-string");
const isRegularExpression = require("@validatem/is-regular-expression");

module.exports = function matchOrError(regex, string) {
	validateArguments(arguments, [
		[ "regex", required, isRegularExpression ],
		[ "string", required, isString ]
	]);

	let match = regex.exec(string);

	if (match == null) {
		throw new Error(`Regular expression ${regex.toString()} failed to match on string: ${string}`);
	} else {
		// NOTE: Follows `execall` format: https://www.npmjs.com/package/execall
		return {
			match: match[0],
			subMatches: match.slice(1),
			index: match.index
		};
	}
};
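A small usage sketch (the pattern and input are arbitrary examples; the require path is assumed):

const matchOrError = require("./match-or-error"); // path assumed

matchOrError(/^\/dev\/(.+)$/, "/dev/sda");
// => { match: "/dev/sda", subMatches: [ "sda" ], index: 0 }
// A non-matching input throws instead of returning null.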
@@ -0,0 +1,9 @@
"use strict";

module.exports = function maybePrefix(prefix, text) {
	if (text == null) {
		return text;
	} else {
		return `${prefix} ${text}`;
	}
};
@@ -1,6 +1,6 @@
 "use strict";

-const parseIECBytes = require("./bytes/iec");
+const parseIECBytes = require("../parse-bytes-iec");

 /* NOTE: This parsing module is for a special case; values that are normally handled using the Linux kernel's `memparse` function (which parses memory suffixes like K, M, G, etc.) but that do *not* semantically represent bytes. */
@@ -0,0 +1,5 @@
"use strict";

module.exports = function shallowMerge(... objects) {
	return Object.assign({}, ... objects);
};
@@ -0,0 +1,10 @@
"use strict";

module.exports = function createJsonParser() {
	return {
		supportsStreams: false,
		parse: function (text) {
			return JSON.parse(text);
		}
	};
};
@@ -0,0 +1,73 @@
"use strict";

const pegjs = require("pegjs");
const { validateOptions, either, required, isString, isPlainObject, allowExtraProperties } = require("validatem");
const fs = require("fs");
const moduleEval = require("eval");
const vm = require("vm");
const asExpression = require("as-expression");
const textParser = require("../text-parser");

module.exports = function createPegParser({ grammar, grammarFile, options }) {
	validateOptions(arguments, [
		{
			grammar: [ isString ],
			grammarFile: [ isString ],
			options: [ isPlainObject ]
		},
		// FIXME: require-either
		either(
			allowExtraProperties({ grammar: [ required ] }),
			allowExtraProperties({ grammarFile: [ required ] })
		)
	]);

	if (grammarFile != null) {
		// FIXME: cache
		grammar = fs.readFileSync(grammarFile, "utf8");
	}

	let parserCode = pegjs.generate(grammar, {
		... options,
		output: "source",
		format: "commonjs"
	});

	let parser = asExpression(() => {
		if (grammarFile != null) {
			return moduleEval(parserCode, grammarFile, {}, true);
		} else {
			let exports_ = {};

			let sandbox = {
				exports: exports_,
				module: {
					exports: exports_,
				},
				require: function () {
					throw new Error("You cannot use require() when loading a grammar as a string; use the `grammarFile` option instead");
				}
			};

			let script = new vm.Script(parserCode.replace(/^\#\!.*/, ''));
			script.runInNewContext(sandbox);

			return sandbox.module.exports;
		}
	});

	return {
		supportsStreams: false,
		parse: function (text) {
			try {
				return parser.parse(text);
			} catch (error) {
				if (error.name === "SyntaxError") {
					throw textParser.NoResult.chain(error, "Parsing output failed");
				} else {
					throw error;
				}
			}
		}
	};
};
@@ -0,0 +1,11 @@
"use strict";

const errorChain = require("error-chain");

module.exports = {
	parse: function parseText(text, parser) {
		return parser.parse(text);
	},
	// FIXME: Force global implementation!
	NoResult: errorChain.create("NoResult")
};
@@ -0,0 +1,11 @@
"use strict";

const path = require("path");
const fs = require("fs");

const parseText = require("./");
const createPegAdapter = require("./peg");

let pegAdapter = createPegAdapter(fs.readFileSync(path.join(__dirname, "test.pegjs"), "utf8"));

console.log(parseText("hello mars", pegAdapter));
@@ -0,0 +1,13 @@
root
	= "hello" _ location:location {
		return { location };
	}

_
	= ("\t" / " ")+ {
		return undefined;
	}

location
	= "world"
	/ "earth"
@@ -0,0 +1,6 @@
## API

TODO

NOTE: supportRebuild can be disabled to prevent the consuming code from using the resulting list with `rebuild`, eg. when you're going to provide a filtered list to the consumer (that would not rebuild correctly)
TODO: Maybe make this more abuse-proof by also exposing (and requiring) a `sequenceNumber` property and having the rebuild fail when it's not consecutive?
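A rough sketch of the flatten/rebuild round-trip the NOTE refers to, using the same require style as the test file in this commit; the tree itself is an arbitrary example:

const treecutter = require("./");

let tree = [ { name: "a", children: [ { name: "a1" } ] } ];

let list = treecutter.flatten(tree);
// => [ { name: "a", _treecutterDepth: 0, _treecutterSequenceNumber: 0 },
//      { name: "a1", _treecutterDepth: 1, _treecutterSequenceNumber: 1 } ]

treecutter.rebuild(list);
// => structurally the same tree again

// Per the NOTE above: a filtered or re-sorted copy of `list` is not guaranteed
// to rebuild correctly, which is what the sequence-number check is meant to catch.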
@@ -0,0 +1,33 @@
"use strict";

const treecutter = require("./");
const util = require("util");

function log(value) {
	console.log(util.inspect(value, { colors: true, depth: null }));
}

let tree = [{
	name: "a",
	children: [
		{ name: "a1" },
		{ name: "a2",
			children: [
				{ name: "a2a" },
				{ name: "a2b" },
				{ name: "a2c" }
			]
		}
	]
}, {
	name: "b"
}];

let flattened = treecutter.flatten(tree);

log(flattened);

let rebuilt = treecutter.rebuild(flattened);

log(rebuilt);
@@ -0,0 +1,111 @@
"use strict";

const { validateArguments, required, isString, isArray, ValidationError } = require("validatem");
const assureArray = require("assure-array");
const shallowMerge = require("../shallow-merge");

function createListValidator() {
	let lastSequenceNumber = null;

	return function isTreecutterList(value) {
		isArray(value);

		// Check every item: it must carry the treecutter metadata, and the sequence
		// numbers must be consecutive (a gap means the list was filtered or reordered).
		for (let item of value) {
			if (item._treecutterDepth == null || item._treecutterSequenceNumber == null) {
				throw new ValidationError(`Must be a treecutter-generated list of items`);
			} else if (lastSequenceNumber != null && item._treecutterSequenceNumber !== lastSequenceNumber + 1) {
				throw new ValidationError(`Must be the original, unfiltered, unsorted treecutter-generated list of items`);
			} else {
				lastSequenceNumber = item._treecutterSequenceNumber;
			}
		}
	};
}

let validateTreecutterOptions = {
	childrenProperty: isString
};

function defaultOptions(options = {}) {
	return {
		childrenProperty: options.childrenProperty ?? "children"
	};
}

module.exports = {
	flatten: function (tree, options) {
		validateArguments(arguments, [
			[ "tree", required ],
			[ "options", validateTreecutterOptions ]
		]);

		let { childrenProperty } = defaultOptions(options);

		let rootItems = assureArray(tree);
		let list = [];
		let sequenceNumber = 0;

		function add(items, depth) {
			for (let item of items) {
				let listItem = shallowMerge(item, {
					_treecutterDepth: depth,
					_treecutterSequenceNumber: sequenceNumber
				});

				// listItem is a copy, so we can do this safely
				delete listItem[childrenProperty];

				list.push(listItem);
				sequenceNumber += 1;

				if (item[childrenProperty] != null) {
					add(item[childrenProperty], depth + 1);
				}
			}
		}

		add(rootItems, 0);

		return list;
	},
	rebuild: function (list, options) {
		let isTreecutterList = createListValidator();

		validateArguments(arguments, [
			[ "list", required, isTreecutterList ],
			[ "options", validateTreecutterOptions ]
		]);

		let { childrenProperty } = defaultOptions(options);

		let topLevel = [];
		let stack = [];
		let currentDepth = list[0]?._treecutterDepth;

		for (let item of list) {
			let depth = item._treecutterDepth;

			let treeItem = shallowMerge(item, {
				[childrenProperty]: []
			});

			// Again, we're operating on a copy.
			delete treeItem._treecutterDepth;
			delete treeItem._treecutterSequenceNumber;

			if (depth >= 0 && depth <= currentDepth + 1) {
				if (depth === 0) {
					topLevel.push(treeItem);
				} else {
					stack[depth - 1][childrenProperty].push(treeItem);
				}

				currentDepth = depth;
				stack[depth] = treeItem;
				stack.splice(depth + 1); // Remove references higher in the stack, to decrease the chance of a silent failure if there's a bug in the code
			} else {
				throw new Error(`Encountered an invalid item depth; the item's depth is ${depth}, but the current tree depth is ${currentDepth}; if this list was generated by treecutter, please file a bug!`);
			}
		}

		return topLevel;
	}
};
@@ -1,6 +1,6 @@
 "use strict";

-const makeUnits = require("../../make-units");
+const makeUnits = require("../make-units");

 module.exports = makeUnits([
 	{unit: "B", toNext: 1024},
@@ -0,0 +1,6 @@
"use strict";

module.exports = function unreachable(reason) {
	// TODO: Parse the package name out of the stacktrace and include it in the error message?
	throw new Error(`${reason} -- this is a bug, please report it!`);
};
@@ -1,9 +0,0 @@
"use strict";

module.exports = function prefixTitle(prefix, title) {
	if (title == null) {
		return title;
	} else {
		return `${prefix} ${title}`;
	}
};
@@ -0,0 +1,8 @@
"use strict";

const matchOrError = require("../packages/match-or-error");

module.exports = function deviceNameFromPath(path) {
	let [ name ] = matchOrError(/^\/dev\/(.+)$/, path).subMatches;
	return name;
};
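A one-line usage sketch (the device path is chosen arbitrarily):

deviceNameFromPath("/dev/sda"); // => "sda" (a non-/dev/ path makes matchOrError throw instead)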
@@ -1,31 +1,20 @@
 'use strict';

-const joi = require("joi");
-
-// const checkit = require("checkit");
-// const oneOf = require("../../validators/one-of");
-
-module.exports = joi.object({
-	name: joi.string().required(),
-	description: joi.string(),
-	source: joi.string().required(),
-	url: joi.when("source", { is: "http", then: joi.string().required() }),
-	path: joi.when("source", { is: "local", then: joi.string().required() })
-});
-
-// module.exports = checkit({
-// 	name: "string",
-// 	description: "string",
-// 	source: ["required", "string", oneOf([
-// 		"local",
-// 		"http"
-// 	])]
-// }).maybe({
-// 	url: ["required", "string"]
-// }, (input) => {
-// 	return (input.source === "http");
-// }).maybe({
-// 	path: ["required", "string"]
-// }, (input) => {
-// 	return (input.source === "local");
-// });
+const isString = require("@validatem/is-string");
+const required = require("@validatem/required");
+const dynamic = require("@validatem/dynamic");
+const when = require("@validatem/when");
+
+module.exports = dynamic((object) => ({
+	name: [ required, isString ],
+	description: [ isString ],
+	source: [ required, isString ],
+	url: [
+		isString,
+		when(() => object.source === "http", [ required ])
+	],
+	path: [
+		isString,
+		when(() => object.source === "local", [ required ])
+	]
+}));
@@ -1,51 +0,0 @@
"use strict";

const Promise = require("bluebird");

const execBinary = require("../exec-binary");
const parseIECBytes = require("../parse/bytes/iec");
const mapValue = require("../map-value");

function parseBoolean(value) {
	return mapValue(value, {
		0: false,
		1: true
	});
}

function mapType(value) {
	return mapValue(value, {
		part: "partition",
		disk: "disk",
		loop: "loopDevice"
	});
}

function mapDeviceList(devices) {
	return devices.map((device) => {
		return {
			name: device.name,
			type: mapType(device.type),
			mountpoint: device.mountpoint,
			deviceNumber: device["maj:min"],
			removable: parseBoolean(device.rm),
			readOnly: parseBoolean(device.ro),
			size: parseIECBytes(device.size),
			children: (device.children != null) ? mapDeviceList(device.children) : []
		};
	})
}

module.exports = function lsblk() {
	return Promise.try(() => {
		return execBinary("lsblk")
			.withFlags({ json: true })
			.singleResult()
			.expectJsonStdout((result) => {
				return mapDeviceList(result.blockdevices);
			})
			.execute();
	}).then((output) => {
		return output.result;
	});
};
@@ -1,159 +0,0 @@
"use strict";

const Promise = require("bluebird");

const execBinary = require("../exec-binary");
const {B} = require("../units/bytes/iec");
const matchOrError = require("../match-or-error");
const errors = require("../errors");
const mapValue = require("../map-value");

/* FIXME: Error handling, eg. device not found errors */

function mapAttributeFlags(flagString) {
	let flagBuffer = Buffer.from(flagString.slice(2), "hex");
	let flagByte = flagBuffer.readUInt16BE(0);

	if (flagByte & 128 || flagByte & 64) {
		throw new Error(`Encountered unknown flag byte in flag ${flagString}`);
	} else {
		return {
			autoKeep: Boolean(flagByte & 32),
			eventCount: Boolean(flagByte & 16),
			errorRate: Boolean(flagByte & 8),
			affectsPerformance: Boolean(flagByte & 4),
			updatedOnline: Boolean(flagByte & 2),
			indicatesFailure: Boolean(flagByte & 1),
		};
	}
}

module.exports = {
	attributes: function ({ devicePath }) {
		return Promise.try(() => {
			return execBinary("smartctl", [devicePath])
				.asRoot()
				.withFlags({ attributes: true })
				.singleResult()
				.expectStdout("attributes", /^\s*([0-9]+)\s+([a-zA-Z_-]+)\s+(0x[0-9a-f]{4})\s+([0-9]{3})\s+([0-9]{3})\s+([0-9]{3})\s+(Pre-fail|Old_age)\s+(Always|Offline)\s+(FAILING_NOW|In_the_past|-)\s+(.+)$/gm, {
					required: true,
					matchAll: true,
					result: ([id, attributeName, flags, value, worst, threshold, type, updatedWhen, failedWhen, rawValue]) => {
						return {
							id: parseInt(id),
							name: attributeName,
							flags: mapAttributeFlags(flags),
							value: parseInt(value),
							rawValue: rawValue,
							worstValueSeen: parseInt(worst),
							failureThreshold: parseInt(threshold),
							type: mapValue(type, {
								"Pre-fail": "preFail",
								"Old_age": "oldAge"
							}),
							failingNow: (failedWhen === "FAILING_NOW"),
							/* TODO: Should the below include the FAILING_NOW state? */
							failedBefore: (failedWhen === "In_the_past"),
							updatedWhen: mapValue(updatedWhen, {
								"Always": "always",
								"Offline": "offline"
							})
						};
					}
				})
				.execute();
		}).then((output) => {
			return output.result;
		});
	},
	info: function ({ devicePath }) {
		return Promise.try(() => {
			return execBinary("smartctl", [devicePath])
				.asRoot()
				.withFlags({ info: true })
				.expectStdout("smartAvailable", /^SMART support is:\s*(Available|Unavailable|Ambiguous).+$/m, {
					result: ([availability]) => {
						return mapValue(availability, {
							Available: true,
							Unavailable: false,
							Ambiguous: null
						});
					}
				})
				.expectStdout("model", /^Device Model:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("modelFamily", /^Model Family:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("serialNumber", /^Serial Number:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("wwn", /^LU WWN Device Id:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("firmwareVersion", /^Firmware Version:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("size", /^User Capacity:\s*(.+)$/m, {
					result: ([value]) => {
						try {
							let match = matchOrError(/^([0-9,]+) bytes \[[^\]]+\]$/, value);
							return B(parseInt(match[0].replace(/,/g, "")));
						} catch (error) {
							throw errors.UnexpectedOutput.chain(error, "Could not parse drive capacity", { input: value });
						}
					}
				})
				.expectStdout("rpm", /^Rotation Rate:\s*(.+)$/m, {
					result: ([value]) => {
						try {
							let match = matchOrError(/^([0-9]+) rpm$/, value);
							return parseInt(match[0]);
						} catch (error) {
							throw errors.UnexpectedOutput.chain(error, "Could not parse drive RPM", { input: value });
						}
					}
				})
				.expectStdout("sectorSizes", /^Sector Sizes:\s*(.+)$/m, {
					result: ([value]) => {
						try {
							let match = matchOrError(/^([0-9]+) bytes logical, ([0-9]+) bytes physical$/, value);

							return {
								logical: B(parseInt(match[0])),
								physical: B(parseInt(match[1]))
							};
						} catch (error) {
							throw errors.UnexpectedOutput.chain(error, "Could not parse drive sector sizes", { input: value });
						}
					}
				})
				.expectStdout("formFactor", /^Form Factor:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("ataVersion", /^ATA Version is:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("sataVersion", /^SATA Version is:\s*(.+)$/m, { result: ([value]) => value })
				.expectStdout("smartEnabled", /^SMART support is:\s*(Enabled|Disabled)$/m, {
					result: ([value]) => {
						return mapValue(value, {
							Enabled: true,
							Disabled: false
						});
					}
				})
				.execute();
		}).then((output) => {
			return output.result;
		});
	},
	scan: function () {
		return Promise.try(() => {
			return execBinary("smartctl")
				.asRoot()
				.withFlags({ scan: true })
				.singleResult()
				.expectStdout("devices", /^([^ ]+) -d ([^ ]+) #.+$/gm, {
					matchAll: true,
					result: ([devicePath, interface_]) => {
						return {
							path: devicePath,
							interface: interface_
						};
					}
				})
				.execute();
		}).then((output) => {
			return output.result;
		});
	}
};
@@ -0,0 +1,58 @@
"use strict";

const Promise = require("bluebird");
const path = require("path");

const execBinary = require("../exec-binary");
const createPegParser = require("../text-parser-pegjs");
const itemsToObject = require("../../packages/items-to-object");

/* FIXME: Error handling, eg. device not found errors */

function outputParser(rootRule) {
	return createPegParser({
		grammarFile: path.join(__dirname, "./parser.pegjs"),
		options: {
			allowedStartRules: [ rootRule ]
		}
	});
}

module.exports = {
	attributes: function ({ devicePath }) {
		return Promise.try(() => {
			return execBinary("smartctl", [devicePath])
				.asRoot()
				.withFlags({ attributes: true })
				.requireOnStdout(outputParser("RootAttributes"))
				.execute();
		}).then((output) => {
			// NOTE: Ignore the header, for now
			return output.result.attributes;
		});
	},
	info: function ({ devicePath }) {
		return Promise.try(() => {
			return execBinary("smartctl", [devicePath])
				.asRoot()
				.withFlags({ info: true })
				.requireOnStdout(outputParser("RootInfo"))
				.execute();
		}).then((output) => {
			// NOTE: Ignore the header, for now
			return itemsToObject(output.result.fields);
		});
	},
	scan: function () {
		return Promise.try(() => {
			return execBinary("smartctl")
				.asRoot()
				.withFlags({ scan: true })
				.requireOnStdout(outputParser("RootScan"))
				.execute();
		}).then((output) => {
			// NOTE: Ignore the header, for now
			return output.result.devices;
		});
	}
};