feature/node-rewrite
Sven Slootweg 4 years ago
parent 5be1872be3
commit b9fc50c0d2

@ -0,0 +1,16 @@
{
"extends": "@joepie91/eslint-config/react",
"parserOptions": {
"ecmaVersion": 2020,
"sourceType": "script"
},
"parser": "babel-eslint",
"plugins": [
"babel",
"import"
],
"rules": {
"import/no-extraneous-dependencies": 2,
"import/no-unresolved": [2, { "commonjs": true }]
}
}

@ -1,78 +0,0 @@
module.exports = {
"env": {
"browser": true,
"commonjs": true,
"es6": true,
"node": true
},
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true,
"jsx": true
}
},
"plugins": [
"react"
],
"rules": {
/* Things that should effectively be syntax errors. */
"indent": [ "error", "tab", {
SwitchCase: 1
}],
"linebreak-style": [ "error", "unix" ],
"semi": [ "error", "always" ],
/* Things that are always mistakes. */
"getter-return": [ "error" ],
"no-compare-neg-zero": [ "error" ],
"no-dupe-args": [ "error" ],
"no-dupe-keys": [ "error" ],
"no-duplicate-case": [ "error" ],
"no-empty": [ "error" ],
"no-empty-character-class": [ "error" ],
"no-ex-assign": [ "error" ],
"no-extra-semi": [ "error" ],
"no-func-assign": [ "error" ],
"no-invalid-regexp": [ "error" ],
"no-irregular-whitespace": [ "error" ],
"no-obj-calls": [ "error" ],
"no-sparse-arrays": [ "error" ],
"no-undef": [ "error" ],
"no-unreachable": [ "error" ],
"no-unsafe-finally": [ "error" ],
"use-isnan": [ "error" ],
"valid-typeof": [ "error" ],
"curly": [ "error" ],
"no-caller": [ "error" ],
"no-fallthrough": [ "error" ],
"no-extra-bind": [ "error" ],
"no-extra-label": [ "error" ],
"array-callback-return": [ "error" ],
"prefer-promise-reject-errors": [ "error" ],
"no-with": [ "error" ],
"no-useless-concat": [ "error" ],
"no-unused-labels": [ "error" ],
"no-unused-expressions": [ "error" ],
"no-unused-vars": [ "error" , { argsIgnorePattern: "^_" } ],
"no-return-assign": [ "error" ],
"no-self-assign": [ "error" ],
"no-new-wrappers": [ "error" ],
"no-redeclare": [ "error" ],
"no-loop-func": [ "error" ],
"no-implicit-globals": [ "error" ],
"strict": [ "error", "global" ],
/* Make JSX not cause 'unused variable' errors. */
"react/jsx-uses-react": ["error"],
"react/jsx-uses-vars": ["error"],
/* Development code that should be removed before deployment. */
"no-console": [ "warn" ],
"no-constant-condition": [ "warn" ],
"no-debugger": [ "warn" ],
"no-alert": [ "warn" ],
"no-warning-comments": ["warn", {
terms: ["fixme"]
}],
/* Common mistakes that can *occasionally* be intentional. */
"no-template-curly-in-string": ["warn"],
"no-unsafe-negation": [ "warn" ],
}
};

4
.gitignore vendored

@ -1,4 +1,6 @@
config.json config.json
node_modules node_modules
images images
disks disks
yarn-error.log
junk

@ -0,0 +1,31 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "attach",
"name": "Attach to Process",
"address": "localhost",
"port": 9229,
"localRoot": "${workspaceFolder}",
"remoteRoot": "${workspaceFolder}",
"restart": true,
"skipFiles": [
"<node_internals>/**",
"node_modules/**"
],
},
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}/bin/server.js",
"skipFiles": [
"<node_internals>/**"
],
}
]
}

@ -0,0 +1 @@
{ "presets": ["@babel/preset-env"] }

@ -1,17 +1,20 @@
"use strict"; "use strict";
// FIXME: Use this in dev only, have a prod compile/build step
require("@babel/register");
const budoExpress = require("budo-express"); const budoExpress = require("budo-express");
const path = require("path"); const path = require("path");
budoExpress({ budoExpress({
port: 8000, port: 8000,
debug: true, sourceMaps: true,
expressApp: require("../src/app")(), expressApp: require("../src/app")(),
basePath: path.resolve(__dirname, ".."), basePath: path.resolve(__dirname, ".."),
entryPath: "src/client/index.jsx", entryFiles: "src/client/index.jsx",
publicPath: "public", staticPath: "public",
bundlePath: "js/bundle.js", bundlePath: "js/bundle.js",
livereload: "**/*.{css,html}", livereloadPattern: "**/*.{css,html}",
browserify: { browserify: {
extensions: [".jsx"], extensions: [".jsx"],
plugin: [ plugin: [
@ -24,4 +27,4 @@ budoExpress({
}] }]
] ]
}, },
}); });

@ -1,17 +1,18 @@
'use strict'; 'use strict';
const postgresqlSocketUrl = require("postgresql-socket-url");
const config = require("./config.json"); const config = require("./config.json");
module.exports = { module.exports = {
client: "pg", client: "pg",
connection: { connection: {
database: "cvm", connectionString: postgresqlSocketUrl({
charset: "utf8", socketPath: "/tmp",
username: config.database.username, database: config.database
password: config.database.password })
}, },
pool: { pool: {
min: 2, min: 2,
max: 10 max: 10
} }
} };

@ -0,0 +1,140 @@
MARKER:
- Replace local `unreachable` with @joepie91/unreachable
- Update all Validatem usage to new validateArguments API
- LVM / mdraid support and tabs (+ complete refactoring LVM implementation)
- Switch hashing to argon2id
- Switch child_process to execa
IDEAS:
- contextual sidebar on add/edit form pages that shows/highlights all the relevant data for deciding what to fill into the form
- eg. all storage devices and pools when creating a new volume
- or highlighting the currently-editing volume in an edit screen
----------------------
API architecture
- Level 0: (src/wrappers) Data source implementations
- Eg. output-parsing wrappers using `execBinary`, but implementations might also be provided by a third-party module entirely
- The APIs for these are specific to the implementation
- Level 1: (src/api/data-sources) Data source connectors
- These provide a standardized interface over the data source implementations, exposing each individual semantically distinct operation as a function
- That function takes either of:
- An array of identifiers of 'items' to obtain information about
- The `All` symbol to obtain all items
- Level 2: (src/graphql/data-object) The 'data object' abstraction
- Takes in a definition of a GraphQL object's structure, and which properties should be obtained from what data source connectors
- Definition structured as (dataSource => (field => dataGetter))
- The `dataSource` may either be:
- The name of the data source connector to obtain the source data from
- The special `LocalProperties` symbol, which specifies:
- Data that is immediately known upon object instantiation, and doesn't require accessing a data source
- Eg. the identifier that the object was initialized with
- Functions that produce data objects of other types, the instantiation of which doesn't require accessing a data source
- Eg. because it is initialized with the same identifier
- The `field` may either be:
- A string name, in which case it defines how to resolve that specific property on the data object
- The special `ID` symbol, in which case it defines by which identifier to request the 'thing' from the data source connector.
- Usually this will be the identifier that the data object is initialized with.
- The `dataGetter` is either:
- A function, mapping from the source data to a value, called with (sourceData, queryArgs, context)
- sourceData: The result object originating from the data source lookup
- queryArgs: The arguments passed to the property access in the GraphQL query
- context: The full GraphQL context + 'properties' key if DependsOn is used
- A string, specifying the property to extract from the source data, equivalent to `(sourceData) => sourceData[property]`
- NOTE: The dataSources are not specified directly in the data object definition! They're provided via GraphQL context separately.
- Level 3: (src/api/types)
- The actual data object definitions
- Parametric modules, take the full set of types as their argument
- Specified as a *function that instantiates and returns* a newly created data object, when initialized with some sort of identifier value
- Eg. the 'path' for a block device, or the 'ID' for a user
- The instantiation function is free to choose the arguments it accepts for initialization (and how to use them), but a destructured object is recommended
------------
Dynamic data lookup
Sometimes there are special cases where we can't (reliably) obtain particular data from the same source, eg. the correct data source connector to invoke may be dependent on some other data in the object. Need to figure out an API that allows representing this ergonomically.
Maybe have an async "resolve these data sources" API that can be used from within a custom handler? This would sidestep the issue where particularly complex cases are hard or impossible to represent in a declarative format, by just making it custom logic entirely.
Maybe something similar for resolving properties defined elsewhere on the object? Otherwise any custom handler in the [Dynamic] block would invoke the handlers for *all* of these dependencies (which are specified on a block level), even when they are not needed for that particular handler.
-------------
execBinary redesign
- requireOnStdout
- expectOnStdout
- failOnStdout
- requireOnStderr
- expectOnStderr
- failOnStderr
Types of handling:
- requireOn*: a result must be produced by the parsing adapter
- expectOn*: a result *may* be produced by the parsing adapter
- failOn*: if a result is produced by the parsing adapter, that constitutes an error
Adapter:
A { create: Function, supportsStreams: Boolean } object that, upon initialization/calling `create`, returns a function that takes the string or stream of output, and returns a result or throws an error/NoResult. Example adapters:
- matchLiteral: match a literal string
- booleanResult: switches from "return undefined or throw NoResult" to "return true or false"
- matchRegex: match a regular expression and extract data
- matchPeg: run a PEG parser and use its output
- matchMultiple: run multiple adapters and combine the results into a single (keyed) object
matchMultiple example:
matchMultiple({
deviceName: matchRegex(/name: (.+)/, ([ name ]) => name),
isNVMe: matchLiteral("protocol: NVMe", { booleanResult: true })
})
- Different kinds of output handling:
- expect*: require that the handler produce a result
- if result: OK
- if no result: fail
- if parsing error: fail
- handle*: optionally produce a result
- if result: OK
- if no result: OK
- if parsing error: fail
- fail*: when output is detected, produce an error
- if result: fail
- if no result: OK
- if parsing error: fail
- expectStderr (convert stderr to success result) vs. detectStderr (convert stderr to thrown error)
- expectStdout
- expectEmptyOutput
Create various utility methods for parsing stdout/stderr, that can be used separately within the expect* and detect* methods
Some sort of matchAll([ .. ]) utility for returning the results of multiple handlers/extractors? Maybe follow the 'messages' model that PostCSS follows?
Interceptor model? That can also produce messages, and modify the flags and such of the invocation
TODO: Publish error-chain! Separating out the error chaining itself, from the display
Adapt from other cause-retaining error types
full-chain instanceof?
---------------
Glossary
Bind mount
"Mounts" a folder on one (mounted) filesystem, as a separate mount/filesystem, essentially mirroring it under another location
Loopback device
Virtual block device that can be mounted, and is backed by a *file* on another (mounted) filesystem.
----------------
Utilities
fuser
Show which processes use the named files, sockets, or filesystems.

@ -9,6 +9,7 @@ Research questions:
Todo list: Todo list:
- UI: Convert existing Pug templates to JSX/GraphQL - UI: Convert existing Pug templates to JSX/GraphQL
- Finalize the conversion of the physical drives page (totals etc.)
- GraphQL API: Add database support - GraphQL API: Add database support
- GraphQL API: Memory resources (usage, available, etc.) - GraphQL API: Memory resources (usage, available, etc.)
- Wrappers: Add error handling to smartctl wrapper - Wrappers: Add error handling to smartctl wrapper
@ -643,4 +644,10 @@ Hardware -> Storage Devices
- Allocation:usage ratio - Allocation:usage ratio
- IOPS - IOPS
- Read/Write traffic - Read/Write traffic
- Read/Write latency - Read/Write latency
------
lsblk
- Use `bytes` flag to get sizes in JSON output in bytes, rather than as a unit string!

@ -4,7 +4,7 @@
"description": "A VPS management panel", "description": "A VPS management panel",
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
"dev": "NODE_ENV=development nodemon --ext js,pug,jsx,gql --ignore node_modules --ignore src/client bin/server.js" "dev": "NODE_ENV=development nodemon --ext js,pug,jsx,gql,pegjs --ignore node_modules --ignore src/client --inspect=9229 bin/server.js"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@ -13,20 +13,31 @@
"author": "Sven Slootweg", "author": "Sven Slootweg",
"license": "WTFPL", "license": "WTFPL",
"dependencies": { "dependencies": {
"@babel/register": "^7.4.0", "@babel/register": "^7.8.3",
"@joepie91/express-react-views": "^1.0.1", "@joepie91/express-react-views": "^1.0.1",
"@joepie91/gulp-partial-patch-livereload-logger": "^1.0.1", "@validatem/allow-extra-properties": "^0.1.0",
"@validatem/anything": "^0.1.0",
"@validatem/array-of": "^0.1.2",
"@validatem/core": "^0.3.15",
"@validatem/dynamic": "^0.1.2",
"@validatem/is-number": "^0.1.3",
"@validatem/is-regular-expression": "^0.1.0",
"@validatem/is-string": "^1.0.0",
"@validatem/required": "^0.1.1",
"@validatem/when": "^0.1.0",
"JSONStream": "^1.1.4", "JSONStream": "^1.1.4",
"argon2": "^0.27.0",
"array.prototype.flat": "^1.2.1", "array.prototype.flat": "^1.2.1",
"as-expression": "^1.0.0",
"assure-array": "^1.0.0", "assure-array": "^1.0.0",
"bhttp": "^1.2.4", "bhttp": "^1.2.4",
"bignumber.js": "^8.1.1", "bignumber.js": "^8.1.1",
"bluebird": "^3.4.6", "bluebird": "^3.4.6",
"body-parser": "^1.15.2", "body-parser": "^1.15.2",
"budo-express": "^1.0.2",
"capitalize": "^2.0.0", "capitalize": "^2.0.0",
"checkit": "^0.7.0", "chalk": "^4.1.0",
"classnames": "^2.2.6", "classnames": "^2.2.6",
"create-error": "^0.3.1",
"create-event-emitter": "^1.0.0", "create-event-emitter": "^1.0.0",
"dataloader": "^1.4.0", "dataloader": "^1.4.0",
"debounce": "^1.0.0", "debounce": "^1.0.0",
@ -34,61 +45,56 @@
"default-value": "^1.0.0", "default-value": "^1.0.0",
"dotty": "^0.1.0", "dotty": "^0.1.0",
"end-of-stream": "^1.1.0", "end-of-stream": "^1.1.0",
"entities": "^2.0.0",
"error-chain": "^0.1.2",
"escape-string-regexp": "^2.0.0", "escape-string-regexp": "^2.0.0",
"eval": "^0.1.4",
"execall": "^1.0.0", "execall": "^1.0.0",
"express": "^4.14.0", "express": "^4.14.0",
"express-promise-router": "^1.1.0", "express-promise-router": "^1.1.0",
"express-ws": "^3.0.0", "express-ws": "^3.0.0",
"fs-extra": "^3.0.1", "fs-extra": "^3.0.1",
"function-rate-limit": "^1.1.0", "function-rate-limit": "^1.1.0",
"generate-lookup-table": "^1.0.0",
"graphql": "^14.2.1", "graphql": "^14.2.1",
"joi": "^14.3.0", "is-iterable": "^1.1.1",
"is-plain-obj": "^2.1.0",
"knex": "^0.13.0", "knex": "^0.13.0",
"map-obj": "^3.0.0", "map-obj": "^3.0.0",
"match-value": "^1.1.0",
"memoizee": "^0.4.14", "memoizee": "^0.4.14",
"nanoid": "^2.1.11",
"object.fromentries": "^2.0.2",
"pegjs": "^0.10.0",
"pg": "^6.1.0", "pg": "^6.1.0",
"pug": "^2.0.0-beta6", "postgresql-socket-url": "^1.0.0",
"react-dom": "^16.8.6", "react-dom": "^16.8.6",
"rfr": "^1.2.3",
"scrypt-for-humans": "^2.0.5",
"snake-case": "^2.1.0", "snake-case": "^2.1.0",
"split": "^1.0.0", "split": "^1.0.0",
"sse-channel": "^3.1.1", "sse-channel": "^3.1.1",
"syncpipe": "^1.0.0",
"through2": "^2.0.1", "through2": "^2.0.1",
"uuid": "^2.0.2" "uuid": "^2.0.2",
"validatem": "^0.2.0"
}, },
"devDependencies": { "devDependencies": {
"@babel/core": "^7.1.6", "@babel/core": "^7.8.4",
"@babel/preset-env": "^7.1.6", "@babel/node": "^7.8.4",
"@babel/preset-env": "^7.8.4",
"@babel/preset-react": "^7.0.0", "@babel/preset-react": "^7.0.0",
"@joepie91/gulp-preset-es2015": "^1.0.1", "@joepie91/eslint-config": "^1.1.0",
"@joepie91/gulp-preset-scss": "^1.0.1", "babel-eslint": "^10.0.3",
"babel-core": "^6.14.0",
"babel-loader": "^6.4.1",
"babel-preset-es2015": "^6.14.0",
"babel-preset-es2015-riot": "^1.1.0",
"babelify": "^10.0.0", "babelify": "^10.0.0",
"browserify-hmr": "^0.3.7", "browserify-hmr": "^0.3.7",
"budo": "^11.5.0", "budo": "^11.5.0",
"chokidar": "^1.6.0", "eslint": "^6.8.0",
"eslint": "^5.16.0", "eslint-plugin-babel": "^5.3.0",
"eslint-plugin-import": "^2.20.1",
"eslint-plugin-react": "^7.12.4", "eslint-plugin-react": "^7.12.4",
"gulp": "^3.9.1", "eslint-plugin-react-hooks": "^2.4.0",
"gulp-cached": "^1.1.0",
"gulp-livereload": "^3.8.1",
"gulp-named-log": "^1.0.1",
"gulp-nodemon": "^2.1.0",
"gulp-rename": "^1.2.2",
"jade": "^1.11.0",
"json-loader": "^0.5.4",
"listening": "^0.1.0",
"nodemon": "^1.18.11", "nodemon": "^1.18.11",
"npm-check-licenses": "^1.0.5",
"react": "^16.8.6", "react": "^16.8.6",
"react-hot-loader": "^4.3.12", "react-hot-loader": "^4.3.12"
"riot": "^3.6.1",
"riotjs-loader": "^4.0.0",
"tiny-lr": "^0.2.1",
"webpack": "^1.15.0",
"webpack-stream": "^3.2.0"
} }
} }

@ -60,6 +60,9 @@ table {
table td.hidden { table td.hidden {
border: none; } border: none; }
table.drives td {
vertical-align: top; }
table.drives td.smart.HEALTHY { table.drives td.smart.HEALTHY {
background-color: #00a500; } background-color: #00a500; }
@ -93,3 +96,9 @@ table.drives th.atRisk {
table.drives th.failing { table.drives th.failing {
color: #c20000; } color: #c20000; }
.stacktrace {
white-space: pre-wrap;
font-family: monospace; }
.stacktrace .irrelevant {
color: gray; }

@ -0,0 +1,52 @@
"use strict";
const Promise = require("bluebird");
const memoizee = require("memoizee");
const fs = Promise.promisifyAll(require("fs"));
const treecutter = require("../../packages/treecutter");
const findmnt = require("../../packages/exec-findmnt");
const shallowMerge = require("../../packages/shallow-merge");
const All = require("../../packages/graphql-interface/symbols/all");
module.exports = function () {
let findmntOnce = memoizee(() => {
return Promise.try(() => {
return findmnt();
}).then((mounts) => {
return treecutter.flatten(mounts);
}).map((mount) => {
if (mount.sourceDevice?.startsWith("/")) {
return Promise.try(() => {
return fs.realpathAsync(mount.sourceDevice);
}).then((actualSourcePath) => {
return shallowMerge(mount, {
sourceDevice: actualSourcePath
});
});
} else {
return mount;
}
}).then((list) => {
let tree = treecutter.rebuild(list);
return {
tree: tree,
list: list
};
});
});
return function (mountpoints) {
return Promise.try(() => {
return findmntOnce();
}).then(({tree, list}) => {
return mountpoints.map((mountpoint) => {
if (mountpoint === All) {
return tree;
} else {
return list.find((mount) => mount.mountpoint === mountpoint);
}
});
});
};
};

@ -2,35 +2,61 @@
const Promise = require("bluebird"); const Promise = require("bluebird");
const memoizee = require("memoizee"); const memoizee = require("memoizee");
const asExpression = require("as-expression");
const linearizeTree = require("../../linearize-tree"); const fs = Promise.promisifyAll(require("fs"));
const lsblk = require("../../wrappers/lsblk"); const lsblk = require("../../packages/exec-lsblk");
const All = require("../../graphql/symbols/all"); const All = require("../../packages/graphql-interface/symbols/all");
const treecutter = require("../../packages/treecutter");
const findInTree = require("../../packages/find-in-tree");
const shallowMerge = require("../../packages/shallow-merge");
const unreachable = require("../../packages/unreachable");
module.exports = function () { module.exports = function () {
let lsblkOnce = memoizee(() => { let lsblkOnce = memoizee(() => {
return Promise.try(() => { return Promise.try(() => {
return lsblk(); return lsblk();
}).then((tree) => {
return treecutter.flatten(tree);
}).map((device) => {
return Promise.try(() => {
return fs.realpathAsync(device.path);
}).then((actualPath) => {
return shallowMerge(device, {
path: actualPath
});
});
}).then((devices) => { }).then((devices) => {
return { return {
tree: devices, tree: treecutter.rebuild(devices),
list: linearizeTree(devices) list: devices
}; };
}); });
}); });
return function (names) { return function (selectors) {
return Promise.try(() => { return Promise.try(() => {
return lsblkOnce(); return lsblkOnce();
}).then(({tree, list}) => { }).then(({tree, list}) => {
return selectors.map((selector) => {
return names.map((name) => { if (selector === All) {
if (name === All) { // return tree;
return tree; return list;
} else { } else {
return list.find((device) => device.name === name); let { path, name } = selector;
let predicate = asExpression(() => {
if (path != null) {
return (device) => device.path === path;
} else if (name != null) {
return (device) => device.name === name;
} else {
unreachable("No selector specified for lsblk");
}
});
return findInTree({ tree, predicate });
} }
}); });
}); });
}; };
}; };

@ -3,8 +3,8 @@
const Promise = require("bluebird"); const Promise = require("bluebird");
const memoizee = require("memoizee"); const memoizee = require("memoizee");
const lvm = require("../../../wrappers/lvm"); const lvm = require("../../../packages/exec-lvm");
const All = require("../../../graphql/symbols/all"); const All = require("../../../packages/graphql-interface/symbols/all");
module.exports = function () { module.exports = function () {
let getPhysicalVolumesOnce = memoizee(lvm.getPhysicalVolumes); let getPhysicalVolumesOnce = memoizee(lvm.getPhysicalVolumes);
@ -22,4 +22,4 @@ module.exports = function () {
}); });
}); });
}; };
}; };

@ -0,0 +1,13 @@
"use strict";
const Promise = require("bluebird");
const nvmeCli = require("../../../packages/exec-nvme-cli");
module.exports = function () {
return function (controllerPaths) {
return Promise.map(controllerPaths, (path) => {
return nvmeCli.listNamespaces({ devicePath: path });
});
};
};

@ -1,7 +1,7 @@
"use strict"; "use strict";
const Promise = require("bluebird"); const Promise = require("bluebird");
const smartctl = require("../../../wrappers/smartctl"); const smartctl = require("../../../packages/exec-smartctl");
module.exports = function () { module.exports = function () {
return function (paths) { return function (paths) {
@ -9,4 +9,4 @@ module.exports = function () {
return smartctl.attributes({ devicePath: path }); return smartctl.attributes({ devicePath: path });
}); });
}; };
}; };

@ -1,7 +1,7 @@
"use strict"; "use strict";
const Promise = require("bluebird"); const Promise = require("bluebird");
const smartctl = require("../../../wrappers/smartctl"); const smartctl = require("../../../packages/exec-smartctl");
module.exports = function () { module.exports = function () {
return function (paths) { return function (paths) {
@ -9,4 +9,4 @@ module.exports = function () {
return smartctl.info({ devicePath: path }); return smartctl.info({ devicePath: path });
}); });
}; };
}; };

@ -3,8 +3,8 @@
const Promise = require("bluebird"); const Promise = require("bluebird");
const memoizee = require("memoizee"); const memoizee = require("memoizee");
const smartctl = require("../../../wrappers/smartctl"); const smartctl = require("../../../packages/exec-smartctl");
const All = require("../../../graphql/symbols/all"); const All = require("../../../packages/graphql-interface/symbols/all");
module.exports = function () { module.exports = function () {
let scanOnce = memoizee(smartctl.scan); let scanOnce = memoizee(smartctl.scan);
@ -22,4 +22,4 @@ module.exports = function () {
}); });
}); });
}; };
}; };

@ -5,9 +5,9 @@ const graphql = require("graphql");
const fs = require("fs"); const fs = require("fs");
const path = require("path"); const path = require("path");
const createGraphQLInterface = require("../graphql/index"); const createGraphQLInterface = require("../packages/graphql-interface/index");
const All = require("../graphql/symbols/all"); const All = require("../packages/graphql-interface/symbols/all");
const loadTypes = require("../graphql/type-loader"); const loadTypes = require("../packages/graphql-interface/type-loader");
const createLoaders = require("./loaders"); const createLoaders = require("./loaders");
@ -45,6 +45,7 @@ let schema = graphql.buildSchema(fs.readFileSync(path.resolve(__dirname, "../sch
let types = loadTypes({ let types = loadTypes({
Drive: require("./types/drive"), Drive: require("./types/drive"),
BlockDevice: require("./types/block-device"), BlockDevice: require("./types/block-device"),
Mount: require("./types/mount"),
LVMPhysicalVolume: require("./types/lvm-physical-volume"), LVMPhysicalVolume: require("./types/lvm-physical-volume"),
LVMVolumeGroup: require("./types/lvm-volume-group"), LVMVolumeGroup: require("./types/lvm-volume-group"),
}); });

@ -5,10 +5,12 @@ const mapObj = require("map-obj");
let dataSourceFactories = { let dataSourceFactories = {
lsblk: require("./data-sources/lsblk"), lsblk: require("./data-sources/lsblk"),
findmnt: require("./data-sources/findmnt"),
smartctlInfo: require("./data-sources/smartctl/info"), smartctlInfo: require("./data-sources/smartctl/info"),
smartctlScan: require("./data-sources/smartctl/scan"), smartctlScan: require("./data-sources/smartctl/scan"),
smartctlAttributes: require("./data-sources/smartctl/attributes"), smartctlAttributes: require("./data-sources/smartctl/attributes"),
lvmPhysicalVolumes: require("./data-sources/lvm/physical-volumes"), lvmPhysicalVolumes: require("./data-sources/lvm/physical-volumes"),
nvmeListNamespaces: require("./data-sources/nvme/list-namespaces"),
}; };
module.exports = function createLoaders() { module.exports = function createLoaders() {
@ -18,4 +20,4 @@ module.exports = function createLoaders() {
new DataLoader(factory()) new DataLoader(factory())
]; ];
}); });
}; };

@ -1,35 +1,85 @@
"use strict"; "use strict";
const {createDataObject, LocalProperties, ID} = require("../../graphql/data-object"); const Promise = require("bluebird");
const deviceNameFromPath = require("../../device-name-from-path"); const fs = Promise.promisifyAll(require("fs"));
const mapValue = require("../../map-value"); const matchValue = require("match-value");
module.exports = function (_types) { const { createDataObject, LocalProperties, ID, Dynamic } = require("../../packages/graphql-interface/data-object");
return function BlockDevice({ name, path }) { const All = require("../../packages/graphql-interface/symbols/all");
if (name != null) { const treecutter = require("../../packages/treecutter");
path = `/dev/${name}`;
} else if (path != null) { module.exports = function (types) {
name = deviceNameFromPath(path); return function BlockDevice({ name, path, _treecutterDepth, _treecutterSequenceNumber }) {
} // if (name != null) {
// path = `/dev/${name}`;
// } else if (path != null) {
// name = deviceNameFromPath(path);
// }
// return Promise.try(() => {
// return fs.realpathAsync(path);
// }).then((realPath) => {
/* FIXME: parent */ /* FIXME: parent */
return createDataObject({ return createDataObject({
[LocalProperties]: { [LocalProperties]: {
path: path _treecutterDepth,
_treecutterSequenceNumber
},
[Dynamic]: {
mounts: function ({ type }, { resolveProperty, resolvePropertyPath, resolveDataSource }) {
return Promise.try(() => {
return resolveDataSource("findmnt", All);
}).then((allMounts) => {
return treecutter.flatten(allMounts);
}).map((mount) => {
return types.Mount({ mountpoint: mount.mountpoint });
}).filter((mount) => {
return Promise.try(() => {
return resolvePropertyPath([ "sourceDevice", "path" ], mount);
}).then((sourceDevicePath) => {
// FIXME: Get own path dynamically
return (sourceDevicePath === path);
});
}).then((mounts) => {
if (type != null) {
return Promise.filter(mounts, (mount) => {
return Promise.try(() => {
return resolveProperty("type", mount);
}).then((mountType) => {
return (mountType === type);
});
});
} else {
return mounts;
}
});
}
}, },
// findmnt: {
// [ID]: All,
// mounts: function (allMounts, { type }, context) {
// let { resolveProperty } = context;
// console.log("CONTEXT", context);
// // FIXME: Why is this called so often?
// }
// },
lsblk: { lsblk: {
[ID]: name, [ID]: { name, path },
name: "name", name: "name",
path: (device) => {
return fs.realpathAsync(device.path);
},
type: (device) => { type: (device) => {
return mapValue(device.type, { return matchValue(device.type, {
partition: "PARTITION", partition: "PARTITION",
disk: "DISK", disk: "DISK",
loopDevice: "LOOP_DEVICE" loopDevice: "LOOP_DEVICE"
}); });
}, },
size: "size", size: "size",
mountpoint: "mountpoint", mountpoint: "mountpoint", // FIXME: Isn't this obsoleted by `mounts`?
deviceNumber: "deviceNumber", deviceNumber: "deviceNumber",
removable: "removable", removable: "removable",
readOnly: "readOnly", readOnly: "readOnly",
@ -40,5 +90,6 @@ module.exports = function (_types) {
} }
} }
}); });
// });
}; };
}; };

@ -2,40 +2,99 @@
const Promise = require("bluebird"); const Promise = require("bluebird");
const {createDataObject, LocalProperties, ID} = require("../../graphql/data-object"); const {createDataObject, LocalProperties, ID, Dynamic} = require("../../packages/graphql-interface/data-object");
const upperSnakeCase = require("../../upper-snake-case"); const upperSnakeCase = require("../../packages/upper-snake-case");
const linearizeTree = require("../../linearize-tree"); const treecutter = require("../../packages/treecutter");
const deviceNameFromPath = require("../../device-name-from-path"); const deviceNameFromPath = require("../../util/device-name-from-path");
/* TO IMPLEMENT:
- resolveProperty
- resolveProperties
- resolveDataSource
- Dynamic
*/
module.exports = function (types) { module.exports = function (types) {
return function Drive({ path }) { return function Drive({ path }) {
return createDataObject({ return createDataObject({
[LocalProperties]: { [LocalProperties]: {
path: path, path: path,
blockDevice: () => {
return types.BlockDevice({ path: path });
},
/* FIXME: allBlockDevices, for representing every single block device that's hosted on this physical drive, linearly. Need to figure out how that works with representation of mdraid arrays, LVM volumes, etc. */ /* FIXME: allBlockDevices, for representing every single block device that's hosted on this physical drive, linearly. Need to figure out how that works with representation of mdraid arrays, LVM volumes, etc. */
}, },
lsblk: { [Dynamic]: {
[ID]: deviceNameFromPath(path), // FIXME: namespaces
allBlockDevices: function (rootDevice, { type }, context) { blockDevice: (_, { resolveProperty }) => {
let devices = linearizeTree([rootDevice]) return Promise.try(() => {
.map((device) => types.BlockDevice({ name: device.name })); return resolveProperty("interface");
}).then((interface_) => {
if (interface_ === "nvme") {
/* NVMe drives do not have a single block device, they have zero or more namespaces */
return null;
} else {
return types.BlockDevice({ path: path });
}
});
},
allBlockDevices: ({ type }, { resolveProperty, resolveDataSource }) => {
// FIXME: Figure out how to semantically represent that data cannot be stored directly onto an NVMe device (only onto a namespace), but *can* be directly stored on a *non-NVMe* device... usually, anyway.
if (type != null) { return Promise.try(() => {
return Promise.filter(devices, (device) => { return resolveProperty("interface");
}).then((interface_) => {
if (interface_ === "nvme") {
// Dynamic data source lookup: nvme list-ns -> Drive
return Promise.try(() => { return Promise.try(() => {
return device.type({}, context); return resolveDataSource("nvmeListNamespaces", path);
}).then((deviceType) => { }).map((namespaceId) => {
return (deviceType === type); return `${path}n${namespaceId}`;
}); });
}); } else {
} else { return [ path ];
return devices; }
} }).map((rootPath) => {
return resolveDataSource("lsblk", { path: rootPath });
}).then((blockDeviceTrees) => {
let blockDevices = treecutter.flatten(blockDeviceTrees)
.map((device) => types.BlockDevice(device));
// MARKER: Find a way to reassemble this tree on the client side, for display
// MARKER: Why are most of the mounts (erroneously) empty?
if (type != null) {
return Promise.filter(blockDevices, (device) => {
return Promise.try(() => {
return resolveProperty("type", device.item);
}).then((deviceType) => {
return (deviceType === type);
});
});
} else {
return blockDevices;
}
});
} }
}, },
lsblk: {
[ID]: { path },
// TODO: Implement [DependsOn], for cases where a source data mapper depends on data from more than one source, so it can reference properties defined elsewhere?
// FIXME: Figure out a nice way to make a source lookup conditional upon something else (like only do a `lsblk` if not an NVMe drive, and for NVMe drives return a hardcoded thing)
// allBlockDevices: function (rootDevice, { type }, context) {
// let devices = treecutter.flatten([rootDevice])
// .map((device) => types.BlockDevice({ name: device.name }));
// if (type != null) {
// return Promise.filter(devices, (device) => {
// return Promise.try(() => {
// return device.type({}, context);
// }).then((deviceType) => {
// return (deviceType === type);
// });
// });
// } else {
// return devices;
// }
// }
},
smartctlScan: { smartctlScan: {
[ID]: path, [ID]: path,
interface: "interface" interface: "interface"
@ -87,4 +146,4 @@ module.exports = function (types) {
} }
}); });
}; };
}; };

@ -1,6 +1,6 @@
"use strict"; "use strict";
const {createDataObject, LocalProperties, ID} = require("../../graphql/data-object"); const {createDataObject, LocalProperties, ID} = require("../../packages/graphql-interface/data-object");
module.exports = function (types) { module.exports = function (types) {
return function LVMPhysicalVolume({ path }) { return function LVMPhysicalVolume({ path }) {
@ -29,4 +29,4 @@ module.exports = function (types) {
} }
}); });
}; };
}; };

@ -1,6 +1,6 @@
"use strict"; "use strict";
const {createDataObject, LocalProperties} = require("../../graphql/data-object"); const {createDataObject, LocalProperties} = require("../../packages/graphql-interface/data-object");
module.exports = function (_types) { module.exports = function (_types) {
return function createVolumeGroup({ name }) { return function createVolumeGroup({ name }) {
@ -10,4 +10,4 @@ module.exports = function (_types) {
} }
}); });
}; };
}; };

@ -0,0 +1,87 @@
"use strict";
const Promise = require("bluebird");
const fs = Promise.promisifyAll(require("fs"));
const {createDataObject, LocalProperties, ID, Dynamic} = require("../../packages/graphql-interface/data-object");
module.exports = function (types) {
return function Mount({ mountpoint }) {
return createDataObject({
[LocalProperties]: {
mountpoint: mountpoint
},
[Dynamic]: {
sourceDevice: (_, { resolveDataSource }) => {
// FIXME: This code is rather bulky, maybe there should be a first-class way to express "try to create a data object that may fail"
return Promise.try(() => {
return resolveDataSource("findmnt", mountpoint);
}).then((mount) => {
if (mount.sourceDevice != null) {
return Promise.try(() => {
return fs.realpathAsync(mount.sourceDevice);
}).then((sourcePath) => {
return Promise.try(() => {
return resolveDataSource("lsblk", { path: sourcePath });
}).then((lsblkResult) => {
if (lsblkResult != null) {
return types.BlockDevice({ path: sourcePath });
} else {
// This occurs when the `sourceDevice` is a valid device, but it is not a *block* device, eg. like with `/dev/fuse`
return null;
}
});
});
} else {
return null;
}
});
}
},
findmnt: {
[ID]: mountpoint,
id: "id",
// FIXME: Aren't we inferring the below somewhere else in the code, using the square brackets?
type: (mount) => {
if (mount.rootPath === "/") {
return "ROOT_MOUNT";
} else {
return "SUBMOUNT";
}
},
// sourceDevice: (mount) => {
// return Promise.try(() => {
// if (mount.sourceDevice != null) {
// return Promise.try(() => {
// return fs.realpathAsync(mount.sourceDevice);
// }).then((sourcePath) => {
// return types.BlockDevice({ path: sourcePath });
// });
// } else {
// return null;
// }
// });
// },
filesystem: "filesystem",
options: "options",
label: "label",
uuid: "uuid",
partitionLabel: "partitionLabel",
partitionUUID: "partitionUUID",
deviceNumber: "deviceNumber",
totalSpace: "totalSpace",
freeSpace: "freeSpace",
usedSpace: "usedSpace",
rootPath: "rootPath",
taskID: "taskID",
optionalFields: "optionalFields",
propagationFlags: "propagationFlags",
children: (mount) => {
return mount.children.map((child) => {
return Mount({ mountpoint: child.mountpoint });
});
}
}
});
};
};

@ -6,8 +6,12 @@ const express = require("express");
const knex = require("knex"); const knex = require("knex");
const path = require("path"); const path = require("path");
const bodyParser = require("body-parser"); const bodyParser = require("body-parser");
const graphql = require("graphql");
const chalk = require("chalk");
const util = require("util");
const errorChain = require("error-chain");
const expressAsyncReact = require("./express-async-react"); const expressAsyncReact = require("./packages/express-async-react");
function projectPath(targetPath) { function projectPath(targetPath) {
return path.join(__dirname, "..", targetPath); return path.join(__dirname, "..", targetPath);
@ -15,7 +19,7 @@ function projectPath(targetPath) {
module.exports = function () { module.exports = function () {
let db = knex(require("../knexfile")); let db = knex(require("../knexfile"));
let imageStore = require("./image-store")(projectPath("./images")); let imageStore = require("./util/image-store")(projectPath("./images"));
let taskTracker = require("../lib/tasks/tracker")(); let taskTracker = require("../lib/tasks/tracker")();
let apiQuery = require("./api")(); let apiQuery = require("./api")();
@ -75,19 +79,34 @@ module.exports = function () {
app.use("/hardware/storage-devices", require("./routes/storage-devices")(state)); app.use("/hardware/storage-devices", require("./routes/storage-devices")(state));
app.use((err, req, res, next) => { app.use((err, req, res, next) => {
if (err.showChain != null) { /* GraphQL will wrap any data-resolving errors in its own error type, and that'll break our `showChain` logic below. Note that some GraphQL errors may not *have* an originalError (eg. schema violations), so we account for that as well. */
console.log(err.showChain()); let sourceError = (err instanceof graphql.GraphQLError && err.originalError != null)
console.log("#####################"); ? err.originalError
console.log(err.getAllContext()); : err;
} else { console.error(errorChain.render(sourceError));
console.log(err.stack);
// FIXME: Render full context instead, according to error-chain?
for (let key of Object.keys(err)) {
console.error(chalk.yellow.bold(`${key}: `) + util.inspect(err[key], { colors: true }));
} }
// if (sourceError.showChain != null) {
// console.log(sourceError.showChain());
// console.log("#####################");
// console.log(sourceError.getAllContext());
// } else {
// console.log(sourceError.stack);
// }
res.render("error", { res.render("error", {
error: err error: err
}); });
debugger;
}); });
return app; return app;
}; };

@ -0,0 +1,6 @@
"use strict";
function concat(characters) {
	/* Joins an array of single characters back into one string. PEG.js
	   parsers match byte-by-byte, so repetition modifiers yield *arrays of
	   characters*; this helper mostly exists to give that `.join("")`
	   operation a self-documenting name. */
	return characters.join("");
}

// `module` is only present under CommonJS (not in an ESM harness).
if (typeof module !== "undefined") {
	module.exports = concat;
}

@ -1,8 +0,0 @@
"use strict";
const matchOrError = require("./match-or-error");
module.exports = function deviceNameFromPath(path) {
let [name] = matchOrError(/^\/dev\/(.+)$/, path);
return name;
};

@ -2,26 +2,17 @@
const errorChain = require("error-chain"); const errorChain = require("error-chain");
let HttpError = errorChain("HttpError", { let HttpError = errorChain.create("HttpError", {
exposeToUser: true context: { exposeToUser: true }
}); });
module.exports = { module.exports = {
UnauthorizedError: errorChain("UnauthorizedError", { UnauthorizedError: errorChain.create("UnauthorizedError", {
statusCode: 401 inheritsFrom: HttpError,
context: { statusCode: 401 }
}),
ForbiddenError: errorChain.create("ForbiddenError", {
inheritsFrom: HttpError,
context: { statusCode: 403 }
}, HttpError), }, HttpError),
ForbiddenError: errorChain("ForbiddenError", {
statusCode: 403
}, HttpError),
UnexpectedOutput: errorChain("UnexpectedOutput"),
ExpectedOutputMissing: errorChain("ExpectedOutputMissing"),
NonZeroExitCode: errorChain("NonZeroExitCode"),
CommandExecutionFailed: errorChain("CommandExecutionFailed"),
InvalidPath: errorChain("InvalidPath"),
InvalidName: errorChain("InvalidName"),
PartitionExists: errorChain("PartitionExists"),
VolumeGroupExists: errorChain("VolumeGroupExists"),
InvalidVolumeGroup: errorChain("InvalidVolumeGroup"),
PhysicalVolumeInUse: errorChain("PhysicalVolumeInUse"),
}; };

@ -1,375 +0,0 @@
"use strict";
require("array.prototype.flat").shim();
const Promise = require("bluebird");
const util = require("util");
const execFileAsync = util.promisify(require("child_process").execFile);
const execAll = require("execall");
const debug = require("debug")("cvm:execBinary");
const errors = require("./errors");
let None = Symbol("None");
/* FIXME: How to handle partial result parsing when an error is encountered in the parsing code? */
/* FIXME: "terminal" flag for individual matches in exec-binary */
/* FIXME: Test that flag-dash prevention in arguments works */
function keyToFlagName(key) {
	// Maps an option key to its command-line flag form: a "!" prefix means
	// "use the key verbatim (minus the '!')", a single character becomes a
	// short flag ("-x"), and anything longer becomes a long flag ("--xyz").
	if (key.startsWith("!")) {
		return key.slice(1);
	}
	return (key.length === 1) ? `-${key}` : `--${key}`;
}
function flagValueToArgs(key, value) {
	// Expands one flag/value pair into argv fragments: `true` means a bare
	// flag, an array repeats the flag once per element (recursively), and
	// anything else becomes a (flag, value) pair.
	if (Array.isArray(value)) {
		return value.flatMap((item) => flagValueToArgs(key, item));
	} else if (value === true) {
		return [key];
	} else {
		return [key, value];
	}
}
function flagsToArgs(flags) {
	// Turns a {key: value} flags object into one flat argv array.
	return Object.entries(flags).flatMap(([key, value]) => {
		return flagValueToArgs(keyToFlagName(key), value);
	});
}
function regexExpectationsForChannel(object, channel) {
	// Selects only the regex-type expectations that were registered for the
	// given output channel ("stdout" / "stderr").
	return object._settings.expectations.filter((expectation) => {
		let sameChannel = (expectation.channel === channel);
		return sameChannel && (expectation.type === "regex");
	});
}
/* Evaluates a single expectation against the captured process output.
   Returns the parsed result, or the `None` sentinel when a regex
   expectation did not match anything. */
function executeExpectation(expectation, stdout, stderr) {
	// Each expectation only ever looks at one of the two output channels.
	let output = (expectation.channel === "stdout") ? stdout : stderr;
	if (expectation.type === "regex") {
		// NOTE(review): for matchAll expectations the regex carries the /g
		// flag, and `.test()` statefully advances its `lastIndex` — confirm
		// the downstream execAll call is unaffected by this.
		if (expectation.regex.test(output)) {
			return executeRegexExpectation(expectation, output);
		} else {
			return None;
		}
	} else if (expectation.type === "json") {
		// JSON expectations parse the entire channel output in one go.
		let parsedOutput = JSON.parse(output);
		if (expectation.callback != null) {
			return expectation.callback(parsedOutput);
		} else {
			return parsedOutput;
		}
	} else {
		throw new Error(`Unexpected expectation type: ${expectation.type}`);
	}
}
/* Applies a regex expectation to `input`. Single-match mode returns the
   capture groups (or the callback's transformation of them); matchAll mode
   returns an array of per-match results. `None` signals "no usable match". */
function executeRegexExpectation(expectation, input) {
	// Optionally post-processes one match through the expectation's callback.
	function processResult(fullMatch, groups) {
		if (expectation.callback != null) {
			return expectation.callback(groups, fullMatch, input);
		} else {
			return groups;
		}
	}
	if (expectation.matchAll) {
		let matches = execAll(expectation.regex, input);
		if (matches.length > 0) { /* FILEBUG: File issue on execall repo to document the no-match output */
			// Callbacks may themselves return `None` to discard a match.
			let results = matches.map((match) => {
				return processResult(match.match, match.sub);
			}).filter((result) => {
				return (result !== None);
			});
			if (results.length > 0) {
				return results;
			} else {
				return None;
			}
		} else {
			return None;
		}
	} else {
		let match = expectation.regex.exec(input);
		if (match != null) {
			// match[0] is the full match; the rest are the capture groups.
			return processResult(match[0], match.slice(1));
		} else {
			return None;
		}
	}
}
function verifyRegex(regex, { matchAll }) {
	// `matchAll` needs a global regex; fail loudly up front instead of
	// silently matching only once later on.
	let needsGlobalFlag = (matchAll === true);
	if (needsGlobalFlag && !regex.flags.includes("g")) {
		throw new Error("You enabled the 'matchAll' option, but the specified regular expression is not a global one; you probably forgot to specify the 'g' flag");
	}
}
function validateArguments(args) {
	// Catch the two most common caller mistakes early: null-ish arguments,
	// and flags smuggled in through the positional-argument list.
	let hasNullish = args.some((arg) => arg == null);
	if (hasNullish) {
		throw new Error("One or more arguments were undefined or null; this is probably a mistake in how you're calling the command");
	}
	let hasDashPrefix = args.some((arg) => arg[0] === "-");
	if (hasDashPrefix) {
		throw new Error("For security reasons, command arguments cannot start with a dash; use the 'withFlags' method if you want to specify flags");
	}
}
module.exports = function createBinaryInvocation(command, args = []) {
/* FIXME: The below disallows dashes in the args, but not in the command. Is that what we want? */
validateArguments(args);
return {
_settings: {
asRoot: false,
singleResult: false,
atLeastOneResult: false,
jsonStdout: false,
jsonStderr: false,
expectations: [],
flags: {},
environment: {}
},
_withSettings: function (newSettings) {
let newObject = Object.assign({}, this, {
_settings: Object.assign({}, this._settings, newSettings)
});
/* FIXME: Make this ignore json expectations */
let hasStdoutExpectations = (regexExpectationsForChannel(newObject, "stdout").length > 0);
let hasStderrExpectations = (regexExpectationsForChannel(newObject, "stderr").length > 0);
if (newObject._settings.jsonStdout && hasStdoutExpectations) {
throw new Error("The 'expectJsonStdout' and 'expectStdout' options cannot be combined");
} else if (newObject._settings.jsonStderr && hasStderrExpectations) {
throw new Error("The 'expectJsonStderr' and 'expectStderr' options cannot be combined");
} else {
return newObject;
}
},
asRoot: function () {
return this._withSettings({ asRoot: true });
},
singleResult: function () {
return this._withSettings({ singleResult: true });
},
atLeastOneResult: function () {
return this._withSettings({ atLeastOneResult: true });
},
/* NOTE: Subsequent withFlags calls involving the same flag key will *override* the earlier value, not add to it! */
withFlags: function (flags) {
if (flags != null) {
return this._withSettings({
flags: Object.assign({}, this._settings.flags, flags)
});
} else {
return this;
}
},
withEnvironment: function (environment) {
if (environment != null) {
return this._withSettings({
environment: Object.assign({}, this._settings.environment, environment)
});
} else {
return this;
}
},
withModifier: function (modifierFunction) {
if (modifierFunction != null) {
return modifierFunction(this);
} else {
return this;
}
},
expectJsonStdout: function (callback) {
if (!this._settings.jsonStdout) {
return this._withSettings({
jsonStdout: true,
expectations: this._settings.expectations.concat([{
type: "json",
channel: "stdout",
key: "stdout",
callback: callback
}])
});
}
},
expectJsonStderr: function (callback) {
if (!this._settings.jsonStderr) {
return this._withSettings({
jsonStderr: true,
expectations: this._settings.expectations.concat([{
type: "json",
channel: "stderr",
key: "stderr",
callback: callback
}])
});
}
},
expectStdout: function (key, regex, {required, result, matchAll} = {}) {
verifyRegex(regex, {matchAll});
return this._withSettings({
expectations: this._settings.expectations.concat([{
type: "regex",
channel: "stdout",
required: (required === true),
key: key,
regex: regex,
callback: result,
matchAll: matchAll
}])
});
},
expectStderr: function (key, regex, {required, result, matchAll} = {}) {
verifyRegex(regex, {matchAll});
return this._withSettings({
expectations: this._settings.expectations.concat([{
type: "regex",
channel: "stderr",
required: (required === true),
key: key,
regex: regex,
callback: result,
matchAll: matchAll
}])
});
},
then: function () {
throw new Error("Attempted to use a command builder as a Promise; you probably forgot to call .execute");
},
execute: function () {
return Promise.try(() => {
let effectiveCommand = command;
let effectiveArgs = flagsToArgs(this._settings.flags).concat(args);
if (this._settings.asRoot) {
effectiveCommand = "sudo";
effectiveArgs = [command].concat(effectiveArgs);
}
let effectiveCompleteCommand = [effectiveCommand].concat(effectiveArgs);
return Promise.try(() => {
debug(`Running: ${effectiveCommand} ${effectiveArgs.map((arg) => `"${arg}"`).join(" ")}`);
return execFileAsync(effectiveCommand, effectiveArgs, {
env: Object.assign({}, process.env, this._settings.environment)
});
}).then(({stdout, stderr}) => {
return { stdout, stderr, exitCode: 0 };
}).catch((error) => {
let {stdout, stderr} = error;
let exitCode = (typeof error.code === "number") ? error.code : null;
return { stdout, stderr, error, exitCode };
}).then(({stdout, stderr, error, exitCode}) => {
let finalResult, resultFound;
try {
if (this._settings.singleResult) {
let result = None;
let i = 0;
while (result === None && i < this._settings.expectations.length) {
let expectation = this._settings.expectations[i];
result = executeExpectation(expectation, stdout, stderr);
if (expectation.required === true && result === None) {
throw new errors.ExpectedOutputMissing(`Expected output not found for key '${expectation.key}'`, {
exitCode: exitCode,
stdout: stdout,
stderr: stderr
});
}
i += 1;
}
finalResult = result;
resultFound = (finalResult !== None);
} else {
let results = this._settings.expectations.map((expectation) => {
let result = executeExpectation(expectation, stdout, stderr);
if (result === None) {
if (expectation.required === true) {
throw new errors.ExpectedOutputMissing(`Expected output not found for key '${expectation.key}'`, {
exitCode: exitCode,
stdout: stdout,
stderr: stderr
});
} else {
return result;
}
} else {
return { key: expectation.key, value: result };
}
}).filter((result) => {
return (result !== None);
});
resultFound = (results.length > 0);
finalResult = results.reduce((object, {key, value}) => {
return Object.assign(object, {
[key]: value
});
}, {});
}
} catch (processingError) {
throw errors.UnexpectedOutput.chain(processingError, "An error occurred while processing command output", {
command: effectiveCompleteCommand,
exitCode: exitCode,
stdout: stdout,
stderr: stderr
});
}
if (resultFound || this._settings.atLeastOneResult === false) {
if (error != null) {
throw new errors.NonZeroExitCode.chain(error, `Process '${command}' exited with code ${exitCode}`, {
exitCode: exitCode,
stdout: stdout,
stderr: stderr,
result: finalResult
});
} else {
return {
exitCode: exitCode,
stdout: stdout,
stderr: stderr,
result: finalResult
};
}
} else {
throw new errors.ExpectedOutputMissing("None of the expected outputs for the command were encountered, but at least one result is required", {
exitCode: exitCode,
stdout: stdout,
stderr: stderr
});
}
}).catch(errors.CommandExecutionFailed.rethrowChained(`An error occurred while executing '${command}'`, {
command: effectiveCompleteCommand
}));
});
}
};
};

@ -58,104 +58,140 @@ let makeQuery = api();
return Promise.try(() => { return Promise.try(() => {
let query = gql` let query = gql`
# query SomeDrives($drivePaths: [String]) { query {
query SomeDrives {
hardware { hardware {
drives { drives {
path smartHealth
interface size
rpm
serialNumber
model model
modelFamily modelFamily
smartAvailable
smartEnabled
serialNumber
wwn
firmwareVersion firmwareVersion
size
rpm
logicalSectorSize
physicalSectorSize
formFactor
ataVersion
sataVersion
smartHealth blockDevice {
# smartAttributes { name
# name }
# type
# value partitions: allBlockDevices(type: PARTITION) {
# failingNow name
size
# flags {
# affectsPerformance mounts {
# indicatesFailure mountpoint
# } }
# } }
# blockDevice {
# removable
# children {
# name
# mountpoint
# size
# }
# }
} }
} }
# resources {
# blockDevices {
# name
# mountpoint
# size
# deviceNumber
# removable
# readOnly
# parent { name }
# children {
# name
# mountpoint
# size
# deviceNumber
# removable
# readOnly
# parent { name }
# }
# }
# lvm {
# physicalVolumes {
# path
# blockDevice {
# name
# deviceNumber
# }
# volumeGroup {
# name
# }
# format
# size
# freeSpace
# duplicate
# allocatable
# used
# exported
# missing
# }
# }
# }
} }
`; `;
// let query = gql`
// # query SomeDrives($drivePaths: [String]) {
// query SomeDrives {
// hardware {
// drives {
// path
// interface
// model
// modelFamily
// blockDevice {
// submounts: mounts(type: SUBMOUNT) {
// mountpoint
// filesystem
// }
// }
// # smartAvailable
// # smartEnabled
// # serialNumber
// # wwn
// # firmwareVersion
// # size
// # rpm
// # logicalSectorSize
// # physicalSectorSize
// # formFactor
// # ataVersion
// # sataVersion
// # smartHealth
// # smartAttributes {
// # name
// # type
// # value
// # failingNow
// # flags {
// # affectsPerformance
// # indicatesFailure
// # }
// # }
// # blockDevice {
// # removable
// # children {
// # name
// # mountpoint
// # size
// # }
// # }
// }
// }
// # resources {
// # blockDevices {
// # name
// # mountpoint
// # size
// # deviceNumber
// # removable
// # readOnly
// # parent { name }
// # children {
// # name
// # mountpoint
// # size
// # deviceNumber
// # removable
// # readOnly
// # parent { name }
// # }
// # }
// # lvm {
// # physicalVolumes {
// # path
// # blockDevice {
// # name
// # deviceNumber
// # }
// # volumeGroup {
// # name
// # }
// # format
// # size
// # freeSpace
// # duplicate
// # allocatable
// # used
// # exported
// # missing
// # }
// # }
// # }
// }
// `;
return makeQuery(query, { return makeQuery(query, {
// drivePaths: ["/dev/sda", "/dev/sdb"] // drivePaths: ["/dev/sda", "/dev/sdb"]
}); });
}).then((results) => { }).then((results) => {
debugDisplay(results); debugDisplay(results);
}); });

@ -1,62 +0,0 @@
"use strict";
const Promise = require("bluebird");
function withProperty(dataSource, id, property) {
	// Convenience wrapper around withData that resolves to one property of
	// the loaded value instead of the whole value.
	return withData(dataSource, id, (value) => value[property]);
}
/* Produces a GraphQL-style resolver (args, context) that loads `id` from
   the named data source on `context.data` and hands the loaded value to
   `callback`. Missing data sources and null results both throw. */
function withData(dataSource, id, callback) {
	return function (args, context) {
		let {data} = context;
		return Promise.try(() => {
			if (data[dataSource] != null) {
				// Data sources expose a dataloader-style `.load(id)` here.
				return data[dataSource].load(id);
			} else {
				throw new Error(`Specified data source '${dataSource}' does not exist`);
			}
		}).then((value) => {
			if (value != null) {
				return callback(value, args, context);
			} else {
				throw new Error(`Got a null value from data source '${dataSource}' for ID '${id}'`);
			}
		});
	};
}
let ID = Symbol("ID");
let LocalProperties = Symbol("localProperties");
module.exports = {
	ID: ID,
	LocalProperties: LocalProperties,
	/* Builds a resolver object from a declarative mapping:
	   - mappings[LocalProperties]: plain values copied onto the result;
	   - every other key is a data-source name whose entries map property
	     names to either a source property name (string) or a resolver
	     callback (function), each keyed on that source's [ID]. */
	createDataObject: function createDataObject(mappings) {
		let object = {};
		if (mappings[LocalProperties] != null) {
			Object.assign(object, mappings[LocalProperties]);
		}
		// Object.entries only yields string keys, so the LocalProperties
		// symbol entry above is not iterated again here.
		for (let [dataSource, items] of Object.entries(mappings)) {
			if (items[ID] != null) {
				let id = items[ID];
				// Likewise, the [ID] symbol is skipped by Object.entries.
				for (let [property, source] of Object.entries(items)) {
					if (typeof source === "string") {
						object[property] = withProperty(dataSource, id, source);
					} else if (typeof source === "function") {
						object[property] = withData(dataSource, id, source);
					}
				}
			} else {
				throw new Error(`No object ID was provided for the '${dataSource}' data source`);
			}
		}
		return object;
	}
};

@ -1,11 +0,0 @@
"use strict";
const graphql = require("graphql");
/* Wraps a GraphQL schema into a simple query function. Each query gets a
   fresh `data` context from `options.loaderFactory` (when provided), so
   per-request loaders do not leak between queries. `args` are passed as
   GraphQL variable values. */
module.exports = function createGraphQLInterface(schema, options, root) {
	return function makeQuery(query, args) {
		return graphql.graphql(schema, query, root, {
			data: (options.loaderFactory != null) ? options.loaderFactory() : {}
		}, args);
	}
};

@ -1,19 +0,0 @@
"use strict";
function linearizeTree(rootList, childrenProperty = "children") {
	// Flattens a list of trees into a single array, depth-first: each node
	// is emitted before its own subtree is descended into (pre-order).
	let linearizedItems = [];
	let visit = (items) => {
		for (let item of items) {
			linearizedItems.push(item);
			let children = item[childrenProperty];
			if (children != null) {
				visit(children);
			}
		}
	};
	visit(rootList);
	return linearizedItems;
}

// `module` is only present under CommonJS (not in an ESM harness).
if (typeof module !== "undefined") {
	module.exports = linearizeTree;
}

@ -1,11 +0,0 @@
"use strict";
function mapValue(value, mapping) {
	/* Translates `value` through the `mapping` lookup table. null/undefined
	   pass through unchanged; any value without a matching entry throws.

	   The lookup deliberately only considers the mapping's OWN properties:
	   the previous plain `mapping[value] != null` check would resolve
	   values like "constructor" or "toString" to inherited
	   Object.prototype members and return those instead of throwing. */
	if (value == null) {
		return value;
	} else if (Object.prototype.hasOwnProperty.call(mapping, value) && mapping[value] != null) {
		return mapping[value];
	} else {
		throw new Error(`Unrecognized value: ${value}`);
	}
}

// `module` is only present under CommonJS (not in an ESM harness).
if (typeof module !== "undefined") {
	module.exports = mapValue;
}

@ -1,17 +0,0 @@
"use strict";
function matchOrError(regex, string) {
	// Runs `regex` against `string` and returns the capture groups; every
	// failure mode (missing inputs, no match) throws instead of returning
	// null, so callers never need to null-check the result.
	if (regex == null) {
		throw new Error("No regular expression was provided");
	}
	if (string == null) {
		throw new Error("No string to match on was provided");
	}
	let match = regex.exec(string);
	if (match == null) {
		throw new Error(`Regular expression ${regex.toString()} failed to match on string: ${string}`);
	}
	return match.slice(1);
}

// `module` is only present under CommonJS (not in an ESM harness).
if (typeof module !== "undefined") {
	module.exports = matchOrError;
}

@ -0,0 +1,11 @@
"use strict";
const errorChain = require("error-chain");
/* Error classes for the exec-binary machinery, created with `error-chain`
   so lower-level causes can be chained onto them. */
module.exports = {
	// Output appeared on a channel where the expectations disallow it.
	UnexpectedOutput: errorChain.create("UnexpectedOutput"),
	// A required expectation found no output to parse.
	// NOTE(review): not thrown by the rewritten exec-binary itself — confirm callers still need it.
	ExpectedOutputMissing: errorChain.create("ExpectedOutputMissing"),
	// An output adapter threw while parsing a channel's contents.
	OutputParsingFailed: errorChain.create("OutputParsingFailed"),
	// The process exited with a code outside the expected set.
	NonZeroExitCode: errorChain.create("NonZeroExitCode"),
	// Catch-all wrapper chained onto any failure during command execution.
	CommandExecutionFailed: errorChain.create("CommandExecutionFailed"),
};

@ -0,0 +1,285 @@
"use strict";
require("array.prototype.flat").shim();
const Promise = require("bluebird");
const util = require("util");
const execFileAsync = util.promisify(require("child_process").execFile);
const debug = require("debug")("cvm:execBinary");
const asExpression = require("as-expression");
const { rethrowAs } = require("error-chain");
const textParser = require("../text-parser");
const errors = require("./errors");
/* FIXME: How to handle partial result parsing when an error is encountered in the parsing adapter? */
/* FIXME: Test that flag-dash prevention in arguments works */
function keyToFlagName(key) {
	// "!"-prefixed keys are passed through verbatim (minus the "!");
	// single-letter keys become short flags, longer keys long flags.
	if (key.startsWith("!")) {
		return key.slice(1);
	} else {
		let dashes = (key.length === 1) ? "-" : "--";
		return `${dashes}${key}`;
	}
}
function flagValueToArgs(key, value) {
	// `true` -> bare flag; arrays repeat the flag once per element
	// (recursively); everything else becomes a (flag, value) pair.
	if (value === true) {
		return [ key ];
	}
	if (Array.isArray(value)) {
		return value.reduce((argList, item) => argList.concat(flagValueToArgs(key, item)), []);
	}
	return [ key, value ];
}
function flagsToArgs(flags) {
	// Expands a {key: value} flags object into one flat argv fragment list.
	let argLists = Object.keys(flags).map((key) => {
		return flagValueToArgs(keyToFlagName(key), flags[key]);
	});
	return argLists.flat();
}
function validateArguments(args) {
	// Catch the two most common caller mistakes early: null-ish arguments,
	// and flags smuggled in through the positional-argument list.
	for (let arg of args) {
		if (arg == null) {
			throw new Error("One or more arguments were undefined or null; this is probably a mistake in how you're calling the command");
		}
	}
	for (let arg of args) {
		if (arg[0] === "-") {
			throw new Error("For security reasons, command arguments cannot start with a dash; use the 'withFlags' method if you want to specify flags");
		}
	}
}
// FIXME: Immutable-builder abstraction
// FIXME: validatem
/* Immutable builder for safely invoking an external binary. Every `with*` /
   `expect*` / `fail*` method returns a NEW builder (via _withSettings), so a
   partially configured builder can be shared and forked freely. Nothing is
   executed until `.execute()` is called. */
module.exports = function createBinaryInvocation(command, args = []) {
	/* FIXME: The below disallows dashes in the args, but not in the command. Is that what we want? */
	validateArguments(args);
	return {
		_settings: {
			asRoot: false,
			expectations: [],
			flags: {},
			environment: {},
			expectedExitCodes: [0],
			// Default merge strategy: shallow-merge all expectation results
			// into one object (later expectations win on key collisions).
			resultMerger: function (results) {
				return results.reduce((merged, result) => Object.assign(merged, result), {});
			}
		},
		// Internal: fork this builder with the given settings overridden.
		_withSettings: function (newSettings) {
			let newObject = Object.assign({}, this, {
				_settings: Object.assign({}, this._settings, newSettings)
			});
			return newObject;
		},
		// Internal: fork this builder with one extra output expectation.
		_withExpectation: function (expectation) {
			return this._withSettings({
				expectations: this._settings.expectations.concat([ expectation ])
			});
		},
		// Run the command through sudo.
		asRoot: function () {
			return this._withSettings({ asRoot: true });
		},
		// NOTE: subsequent withFlags calls with the same key *override* the earlier value.
		withFlags: function (flags) {
			if (flags != null) {
				return this._withSettings({
					flags: Object.assign({}, this._settings.flags, flags)
				});
			} else {
				return this;
			}
		},
		// Extra environment variables, merged over the inherited ones.
		withEnvironment: function (environment) {
			if (environment != null) {
				return this._withSettings({
					environment: Object.assign({}, this._settings.environment, environment)
				});
			} else {
				return this;
			}
		},
		// Escape hatch: apply an arbitrary builder -> builder transformation.
		withModifier: function (modifierFunction) {
			if (modifierFunction != null) {
				return modifierFunction(this);
			} else {
				return this;
			}
		},
		// Optional adapter-parsed output on stdout.
		expectOnStdout: function (adapter) {
			return this._withExpectation({
				channel: "stdout",
				adapter: adapter
			});
		},
		// Mandatory adapter-parsed output on stdout; absence is an error.
		requireOnStdout: function (adapter) {
			return this._withExpectation({
				channel: "stdout",
				adapter: adapter,
				required: true
			});
		},
		// Output matching this adapter on stdout is an error.
		failOnStdout: function (adapter) {
			return this._withExpectation({
				channel: "stdout",
				adapter: adapter,
				disallowed: true
			});
		},
		expectOnStderr: function (adapter) {
			return this._withExpectation({
				channel: "stderr",
				adapter: adapter
			});
		},
		requireOnStderr: function (adapter) {
			return this._withExpectation({
				channel: "stderr",
				adapter: adapter,
				required: true
			});
		},
		failOnStderr: function (adapter) {
			return this._withExpectation({
				channel: "stderr",
				adapter: adapter,
				disallowed: true
			});
		},
		// A null adapter means "any output at all on this channel is an error".
		failOnAnyStderr: function () {
			return this._withExpectation({
				channel: "stderr",
				adapter: null,
				disallowed: true
			});
		},
		// Trap: makes `await builder` fail loudly instead of silently doing nothing.
		then: function () {
			throw new Error("Attempted to use a command builder as a Promise; you probably forgot to call .execute");
		},
		/* Runs the command: builds argv from flags + args (prefixed with sudo
		   when asRoot), captures stdout/stderr and the exit code, verifies
		   the exit code, runs every expectation's adapter over its channel,
		   and resolves to {exitCode, stdout, stderr, result}. */
		execute: function () {
			return Promise.try(() => {
				let effectiveCommand = command;
				let effectiveArgs = flagsToArgs(this._settings.flags).concat(args);
				if (this._settings.asRoot) {
					effectiveCommand = "sudo";
					effectiveArgs = [command].concat(effectiveArgs);
				}
				// FIXME: Shouldn't we represent this in its original form, or at least an escaped form? And suffix 'Unsafe' to ensure it's not used in any actual execution code.
				let effectiveCompleteCommand = [effectiveCommand].concat(effectiveArgs);
				return Promise.try(() => {
					debug(`Running: ${effectiveCommand} ${effectiveArgs.map((arg) => `"${arg}"`).join(" ")}`);
					return execFileAsync(effectiveCommand, effectiveArgs, {
						env: Object.assign({}, process.env, this._settings.environment)
					});
				}).then(({stdout, stderr}) => {
					return { stdout, stderr, exitCode: 0 };
				}).catch((error) => {
					// Non-zero exits reject in execFile; normalize them into the same shape.
					let {stdout, stderr} = error;
					let exitCode = (typeof error.code === "number") ? error.code : null;
					return { stdout, stderr, error, exitCode };
				}).then(({stdout, stderr, error, exitCode}) => {
					try {
						let channels = { stdout, stderr };
						if (!this._settings.expectedExitCodes.includes(exitCode)) {
							// FIXME: Can we actually pass `error` to be chained onto here, when there's a case where `error` is undefined? Namely, when requiring a non-zero exit code, but the process exits with 0.
							throw new errors.NonZeroExitCode.chain(error, `Expected exit code to be one of ${JSON.stringify(this._settings.expectedExitCodes)}, but got '${exitCode}'`, {
								exitCode: exitCode,
								stdout: stdout,
								stderr: stderr
							});
						} else {
							let expectationResults = this._settings.expectations
								.map((expectation) => {
									if (expectation.adapter == null) {
										// Adapter-less expectations only make sense as "fail on any output".
										if (channels[expectation.channel] != null) {
											if (channels[expectation.channel].length > 0) {
												throw new errors.UnexpectedOutput(`Encountered output on '${expectation.channel}', but no output was supposed to be produced there`, {
													failedChannel: expectation.channel
												});
											} else {
												return undefined;
											}
										} else {
											// NOTE(review): plain `new Error(message, object)` only honours a `cause` option per spec; `failedChannel` here is likely dropped — confirm.
											throw new Error(`Encountered expectation for unexpected channel '${expectation.channel}'; this is a bug, please report it`, {
												failedChannel: expectation.channel
											});
										}
									} else {
										let result = asExpression(() => {
											try {
												return expectation.adapter.parse(channels[expectation.channel].toString());
											} catch (error) {
												// TODO: What if both `required` *and* `disallowed`? Can that ever occur, conceptually speaking?
												if (error instanceof textParser.NoResult) {
													// FIXME: Annotate to make error source clearer?
													if (expectation.required === true) {
														throw error;
													} else {
														return undefined;
													}
												} else {
													throw errors.OutputParsingFailed.chain(error, `An error occurred while parsing '${expectation.channel}'`, {
														failedChannel: expectation.channel
													});
												}
											}
										});
										// Adapters must yield plain objects so results can be shallow-merged below.
										if (result !== undefined && (typeof result !== "object" || Array.isArray(result))) {
											throw new Error(`Output adapters may only return a plain object from their parse method (or nothing at all)`);
										} else if (result !== undefined && expectation.disallowed === true) {
											// TODO: How to make this error more informative?
											throw new errors.UnexpectedOutput(`Encountered output on '${expectation.channel}' that isn't supposed to be there`, {
												failedChannel: expectation.channel
											});
										} else {
											return result;
										}
									}
								})
								.filter((result) => {
									return (result != null);
								});
							// With no surviving expectation results, `result` is undefined.
							let mergedResults = (expectationResults.length > 0)
								? this._settings.resultMerger(expectationResults)
								: expectationResults[0];
							return {
								exitCode: exitCode,
								stdout: stdout,
								stderr: stderr,
								result: mergedResults
							};
						}
					} catch (error) {
						// FIXME: Use getAllContext
						let message = (error.failedChannel != null)
							? `Failed while processing ${error.failedChannel} of command`
							: "Failed while processing result of command execution";
						throw errors.CommandExecutionFailed.chain(error, message, {
							exitCode: exitCode,
							stdout: stdout,
							stderr: stderr
						});
					}
				}).catch(rethrowAs(errors.CommandExecutionFailed, `An error occurred while executing '${command}'`, {
					command: effectiveCompleteCommand
				}));
			});
		}
	};
};

@ -1,22 +1,35 @@
"use strict"; "use strict";
const Promise = require("bluebird"); const Promise = require("bluebird");
const execBinary = require("../exec-binary"); const execBinary = require("../exec-binary");
const parseIECBytes = require("../parse/bytes/iec"); const parseIECBytes = require("../parse-bytes-iec");
const parseMountOptions = require("../parse/mount-options"); const parseMountOptions = require("../parse-mount-options");
const createJsonParser = require("../text-parser-json");
// TODO: Should we replace this with a read from /proc/mounts directly?
// Ref. https://unix.stackexchange.com/a/18067
function mapMountList(mounts) { function mapMountList(mounts) {
return mounts.map((mount) => { return mounts.map((mount) => {
/* Some poorly-documented pseudo-filesystems were not worth investigating mount options for, yet. For those, we silently ignore missing/unknown entries. */ // Some poorly-documented pseudo-filesystems were not worth investigating mount options for, yet. For those, we silently ignore missing/unknown entries.
let missingOptionsAllowed = ["cgroup", "cgroup2", "bpf", "pstore"].includes(mount.fstype); // TODO: FUSE should eventually be removed from this list
let missingOptionsAllowed = ["cgroup", "cgroup2", "bpf", "pstore", "fuse"].includes(mount.fstype);
let parsedOptions = parseMountOptions(mount.fstype, mount.options); let parsedOptions = parseMountOptions(mount.fstype, mount.options);
// For dealing with bind mounts, which have a path suffix, like eg.:
// /dev/disk/by-uuid/692937db-d74c-4110-b55b-41a20f1b9342[/nix/store]
let cleanSourceDevice = (mount.source.startsWith("/"))
? mount.source.replace(/\[.+$/, "")
: null; // Pseudo-filesystems
if (missingOptionsAllowed || parsedOptions.missing.length === 0) { if (missingOptionsAllowed || parsedOptions.missing.length === 0) {
return { return {
id: mount.id, id: mount.id,
sourceDevice: mount.source, sourceDevice: cleanSourceDevice,
isBindMount: (cleanSourceDevice != null)
? mount.source.includes("[") // FIXME: Is this actually correct? Can a square bracket ever appear in a device path legitimately?
: false, // Pseudo-filesystems
mountpoint: mount.target, mountpoint: mount.target,
filesystem: mount.fstype, filesystem: mount.fstype,
options: parsedOptions.parsed, options: parsedOptions.parsed,
@ -57,9 +70,7 @@ let columns = [
"TID", "TID",
"ID", "ID",
"OPT-FIELDS", "OPT-FIELDS",
"PROPAGATION", "PROPAGATION"
// "FREQ",
// "PASSNO"
]; ];
module.exports = function findmnt() { module.exports = function findmnt() {
@ -69,12 +80,9 @@ module.exports = function findmnt() {
json: true, json: true,
o: columns.join(",") o: columns.join(",")
}) })
.singleResult() .requireOnStdout(createJsonParser())
.expectJsonStdout((result) => {
return mapMountList(result.filesystems);
})
.execute(); .execute();
}).then((output) => { }).then((output) => {
return output.result; return mapMountList(output.result.filesystems);
}); });
}; };

@ -0,0 +1,67 @@
"use strict";
const Promise = require("bluebird");
const matchValue = require("match-value");
const execBinary = require("../exec-binary");
const parseIECBytes = require("../parse-bytes-iec");
const createJSONParser = require("../text-parser-json");
function parseBoolean(value) {
	// Normalizes lsblk's boolean columns: newer `lsblk` releases emit real
	// booleans, while older ones emit 0/1 instead.
	if (typeof value !== "boolean") {
		return matchValue(value, {
			0: false,
			1: true
		});
	} else {
		return value;
	}
}
function mapType(value) {
	// Maps lsblk's TYPE column onto our public device-type vocabulary. Both
	// `rom` and `disk` surface as "disk", and `lvm` as "partition"; the finer
	// distinction is carried separately by mapSubType.
	let typeMapping = {
		disk: "disk",
		rom: "disk",
		part: "partition",
		lvm: "partition",
		loop: "loopDevice"
	};

	return matchValue(value, typeMapping);
}
function mapSubType(value) {
	// Secondary classification for lsblk types that share a primary type;
	// null means the primary type needs no further qualification.
	let subTypeMapping = {
		rom: "readOnlyMedia",
		lvm: "lvm",
		part: null,
		disk: null,
		loop: null
	};

	return matchValue(value, subTypeMapping);
}
function mapDeviceList(devices) {
	// Normalizes one level of lsblk's JSON device tree, recursing into each
	// device's `children` (absent children become an empty array).
	return devices.map((device) => {
		let mappedChildren = (device.children != null)
			? mapDeviceList(device.children)
			: [];

		return {
			name: device.name,
			path: device.path,
			type: mapType(device.type),
			subType: mapSubType(device.type),
			mountpoint: device.mountpoint,
			deviceNumber: device["maj:min"],
			removable: parseBoolean(device.rm),
			readOnly: parseBoolean(device.ro),
			size: parseIECBytes(device.size),
			children: mappedChildren
		};
	});
}
module.exports = function lsblk() {
return Promise.try(() => {
return execBinary("lsblk")
.withFlags({ json: true, "output-all": true })
.requireOnStdout(createJSONParser())
.execute();
}).then((output) => {
return mapDeviceList(output.result.blockdevices);
});
};

@ -0,0 +1,12 @@
"use strict";
const errorChain = require("error-chain");
module.exports = {
InvalidPath: errorChain.create("InvalidPath"),
InvalidName: errorChain.create("InvalidName"),
PartitionExists: errorChain.create("PartitionExists"),
VolumeGroupExists: errorChain.create("VolumeGroupExists"),
InvalidVolumeGroup: errorChain.create("InvalidVolumeGroup"),
PhysicalVolumeInUse: errorChain.create("PhysicalVolumeInUse"),
};

@ -1,10 +1,10 @@
"use strict"; "use strict";
const Promise = require("bluebird"); const Promise = require("bluebird");
const execBinary = require("../exec-binary"); const execBinary = require("../exec-binary");
const errors = require("../errors"); const parseIECBytes = require("../parse-bytes-iec");
const parseIECBytes = require("../parse/bytes/iec");
const errors = require("./errors");
function mapVersionTitle(title) { function mapVersionTitle(title) {
if (title === "LVM version") { if (title === "LVM version") {
@ -51,10 +51,13 @@ function hasFlag(flag) {
/* The below counts *any* kind of non-null value as having a flag set, to accommodate matchAll scenarios and scenarios where the flag needs to contain further information. */ /* The below counts *any* kind of non-null value as having a flag set, to accommodate matchAll scenarios and scenarios where the flag needs to contain further information. */
return (context.result != null && context.result[flag] != null); return (context.result != null && context.result[flag] != null);
} else {
return false;
} }
}; };
} }
// FIXME: Convert to new execBinary API
module.exports = { module.exports = {
getVersions: function () { getVersions: function () {
return Promise.try(() => { return Promise.try(() => {
@ -236,4 +239,4 @@ module.exports = {
} }
}; };
// TODO: Need to check if cache service running? // TODO: Need to check if cache service running?

@ -0,0 +1,42 @@
"use strict";
const Promise = require("bluebird");
const execAll = require("execall");
const execBinary = require("../exec-binary");
// Parses `nvme list-ns` output: one namespace per line, formatted like
// `[   0]:0x1` — the ID is hexadecimal, except for the special literal `0`.
function createNamespaceParser() {
	return {
		supportsStreams: false,
		parse: function (input) {
			return {
				namespaces: execAll(/^\[\s*[0-9]+\]:(?:(0)|0x([0-9A-F]+))$/gm, input)
					.map((match) => {
						// FIX: execall exposes capture groups as `subMatches`, not
						// `sub` (see the execall README, and this repo's own
						// matchOrError which follows the same format) — the old
						// destructuring read a nonexistent property, so both IDs
						// were always undefined and every namespace parsed to NaN.
						let [ idLiteral, idHex ] = match.subMatches;

						if (idLiteral != null) {
							/* NOTE: This is a special case for when the value is exactly 0 - and maybe there are others too, hence still doing a parseInt, so we can easily change the regex later if needed:
								https://stackoverflow.com/questions/11922876/what-does-a-hash-sign-do-in-printf#comment15879638_11922887
								https://github.com/linux-nvme/nvme-cli/blob/f9ebefe27b0596006d76d58f3219a9fc12e88664/nvme.c#L979
							*/
							return parseInt(idLiteral, 10);
						} else {
							return parseInt(idHex, 16);
						}
					})
			};
		}
	};
}
module.exports = {
listNamespaces: function ({ devicePath }) {
return Promise.try(() => {
return execBinary("nvme", [ "list-ns", devicePath ])
.asRoot()
.expectOnStdout(createNamespaceParser())
.execute();
}).then((output) => {
return output.result.namespaces;
});
}
};

@ -0,0 +1,57 @@
"use strict";
const Promise = require("bluebird");
const path = require("path");
const execBinary = require("../exec-binary");
const createPegParser = require("../text-parser-pegjs");
const itemsToObject = require("../items-to-object");
/* FIXME: Error handling, eg. device not found errors */
function outputParser(rootRule) {
	// Builds a PEG-based stdout parser that starts at the given root rule of
	// the shared smartctl grammar (parser.pegjs next to this file).
	let grammarFile = path.join(__dirname, "./parser.pegjs");

	return createPegParser({
		grammarFile: grammarFile,
		options: {
			allowedStartRules: [ rootRule ]
		}
	});
}
module.exports = {
attributes: function ({ devicePath }) {
return Promise.try(() => {
return execBinary("smartctl", [devicePath])
.asRoot()
.withFlags({ attributes: true })
.requireOnStdout(outputParser("RootAttributes"))
.execute();
}).then((output) => {
// NOTE: Ignore the header, for now
return output.result.attributes;
});
},
info: function ({ devicePath }) {
return Promise.try(() => {
return execBinary("smartctl", [devicePath])
.asRoot()
.withFlags({ info: true })
.requireOnStdout(outputParser("RootInfo"))
.execute();
}).then((output) => {
// NOTE: Ignore the header, for now
return itemsToObject(output.result.fields);
});
},
scan: function () {
return Promise.try(() => {
return execBinary("smartctl")
.asRoot()
.withFlags({ scan: true })
.requireOnStdout(outputParser("RootScan"))
.execute();
}).then((output) => {
// NOTE: Ignore the header, for now
return output.result.devices;
});
}
};

@ -0,0 +1,16 @@
"use strict";
module.exports = function mapAttributeFlags(flagAsNumber) {
if (flagAsNumber & 128 || flagAsNumber & 64) {
throw new Error(`Encountered unknown flag byte in flag ${flagAsNumber.toString(16).padStart(4, "0")}`);
} else {
return {
autoKeep: Boolean(flagAsNumber & 32),
eventCount: Boolean(flagAsNumber & 16),
errorRate: Boolean(flagAsNumber & 8),
affectsPerformance: Boolean(flagAsNumber & 4),
updatedOnline: Boolean(flagAsNumber & 2),
indicatesFailure: Boolean(flagAsNumber & 1),
};
}
};

@ -0,0 +1,249 @@
// PEG.js grammar for parsing `smartctl` text output. Three start rules are
// exposed, one per supported invocation:
//   RootInfo       - `smartctl --info`
//   RootScan       - `smartctl --scan`
//   RootAttributes - `smartctl --attributes`
{
	const matchValue = require("match-value");
	const syncpipe = require("syncpipe");
	const {B} = require("../unit-bytes-iec");
	const mapAttributeFlags = require("./map-attribute-flags");
}

RootInfo
	= header:Header infoSection:InfoSection Newline* {
		return { ...header, fields: infoSection }
	};

RootScan
	= devices:ScanDevice* {
		return { devices: devices };
	}

RootAttributes
	= header:Header attributesSection:AttributesSection Newline* {
		return { ...header, attributes: attributesSection }
	};

// Inline whitespace (spaces/tabs only, never newlines).
_
	= (" " / "\t")*

// Consumes the rest of the current line (and its newline), yielding the text.
RestOfLine
	= content:$[^\n]+ Newline {
		return content;
	}

Newline
	= "\n"
	/ "\r\n"

// The two banner lines every smartctl invocation prints first.
Header 'header'
	= "smartctl " versionString:RestOfLine "Copyright" copyrightStatement:RestOfLine Newline {
		return { versionString, copyrightStatement };
	}

// A (possibly comma-separated) byte count, wrapped in a B unit value.
BytesValue
	= value:SeparatedNumberValue {
		return B(value);
	}

NumberValue
	= value:$[0-9]+ {
		return parseInt(value);
	}

// A decimal number that may contain thousands separators, eg. "1,000,204".
SeparatedNumberValue
	= value:$[0-9,]+ {
		return syncpipe(value, [
			(_) => _.replace(/,/g, ""),
			(_) => parseInt(_)
		]);
	}

HexNumberValue
	= value:$[0-9A-Fa-f]+ {
		return parseInt(value, 16);
	}

IdentifierValue
	= value:$[a-zA-Z_-]+ {
		return value;
	}

// smartctl --scan
ScanDevice 'scanned device'
	= path:$[^ ]+ _ "-d" _ interface_:$[^ ]+ _ RestOfLine {
		return { path: path, interface: interface_ };
	}

// smartctl --info
InfoSection 'information section'
	= "=== START OF INFORMATION SECTION ===" Newline fields:(InfoField+) {
		// Ignored/unknown fields parse to null; drop those here.
		return fields.filter((field) => field != null);
	}

// NOTE: ordering matters — InfoFieldUnknown is the catch-all and must be last.
InfoField 'information field'
	= InfoFieldSimple
	/ InfoFieldIgnored
	/ InfoFieldSize
	/ InfoFieldRPM
	/ InfoFieldSectorSizes
	/ InfoFieldBoolean
	/ InfoFieldUnknown

// Fields whose value is taken verbatim; only the key is renamed.
InfoFieldSimpleKey
	= "Device Model" { return "model"; }
	/ "Model Number" { return "model"; }
	/ "Model Family" { return "modelFamily"; }
	/ "Serial Number" { return "serialNumber"; }
	/ "LU WWN Device Id" { return "wwn"; }
	/ "Firmware Version" { return "firmwareVersion"; }
	/ "Form Factor" { return "formFactor"; }
	/ "ATA Version is" { return "ataVersion"; }
	/ "SATA Version is" { return "sataVersion"; }

InfoFieldSimple
	= key:InfoFieldSimpleKey ":" _ value:RestOfLine {
		return { key: key, value: value };
	}

// Catch-all for keys we don't recognize; warns and yields null (filtered out).
InfoFieldUnknown
	= key:$[^:]+ ":" _ RestOfLine {
		console.warn(`Encountered unrecognized SMART info key: ${key}`);
		return null;
	}

InfoFieldIgnoredKey
	= "Device is"
	/ "Local Time is"

InfoFieldIgnored
	= key:InfoFieldIgnoredKey ":" _ RestOfLine {
		return null;
	}
	/ "SMART support is:" _ ("Available" / "Unavailable") RestOfLine {
		// We don't actually care about this entry, but have to specify its possible values explicitly, to distinguish it from the entry we *do* care about that (annoyingly) uses the same key; see InfoFieldBoolean
		return null;
	}

InfoFieldSize
	// NOTE: We don't actually care about the human-friendly display size after the 'bytes' specifier, hence the RestOfLine
	= InfoFieldSizeKey _ value:SeparatedNumberValue _ "bytes"? _ RestOfLine {
		return {
			key: "size",
			value: B(value)
		};
	}

InfoFieldSizeKey
	= "User Capacity:"
	/ "Total NVM Capacity:"

InfoFieldRPM
	= "Rotation Rate:" _ value:NumberValue _ "rpm" Newline {
		return {
			key: "rpm",
			value: value
		};
	}

InfoFieldSectorSizes
	= "Sector Sizes:" _ logicalSize:BytesValue _ "bytes logical," _ physicalSize:BytesValue _ "bytes physical" Newline {
		return {
			key: "sectorSizes",
			value: {
				logical: logicalSize,
				physical: physicalSize
			}
		};
	}

InfoFieldBooleanKey
	= "SMART support is" { return "smartEnabled"; }

InfoFieldBoolean
	= key:InfoFieldBooleanKey ":" _ value:RestOfLine {
		return {
			key: key,
			value: matchValue(value, {
				Enabled: true,
				Disabled: false
			})
		};
	}

// smartctl --attributes
AttributesSection
	= AttributesSectionSATA
	/ AttributesSectionNVMe

// SATA drives print a fixed-header attribute table; match the header
// literally so column drift fails loudly rather than mis-parsing.
AttributesSectionSATA
	= "=== START OF READ SMART DATA SECTION ===" Newline
	"SMART Attributes Data Structure revision number:" _ NumberValue Newline
	"Vendor Specific SMART Attributes with Thresholds:" Newline
	"ID#" _ "ATTRIBUTE_NAME" _ "FLAG" _ "VALUE" _ "WORST" _ "THRESH" _ "TYPE" _ "UPDATED" _ "WHEN_FAILED" _ "RAW_VALUE" Newline
	attributes:AttributeFieldSATA+ {
		return attributes;
	}

AttributesSectionNVMe
	= "=== START OF SMART DATA SECTION ===" Newline
	"SMART/Health Information (NVMe Log 0x02)" Newline
	attributes:AttributeFieldNVMe+ {
		return attributes;
	}

// Hexadecimal flag bitfield (eg. 0x0032), decoded into named booleans.
AttributeFlags
	= "0x" number:HexNumberValue {
		return mapAttributeFlags(number);
	}

AttributeUpdatedWhen
	= "Always"
	/ "Offline"

AttributeFailedWhen
	= "FAILING_NOW"
	/ "In_the_past"
	/ "-"

AttributeFieldType
	= "Pre-fail"
	/ "Old_age"

// One row of the SATA vendor-specific attribute table.
AttributeFieldSATA
	= _ id:NumberValue
	_ attributeName:IdentifierValue
	_ flags:AttributeFlags
	_ value:NumberValue
	_ worstValue:NumberValue
	_ threshold:NumberValue
	_ type:AttributeFieldType
	_ updatedWhen:AttributeUpdatedWhen
	_ failedWhen:AttributeFailedWhen
	_ rawValue:RestOfLine {
		return {
			id,
			attributeName,
			flags,
			value,
			worstValue,
			threshold,
			rawValue,
			updatedWhen: matchValue(updatedWhen, {
				"Always": "always",
				"Offline": "offline"
			}),
			type: matchValue(type, {
				"Pre-fail": "preFail",
				"Old_age": "oldAge"
			}),
			failingNow: (failedWhen === "FAILING_NOW"),
			/* TODO: Should the below include the FAILING_NOW state? */
			failedBefore: (failedWhen === "In_the_past")
		};
	}

// NVMe health log lines are simple "Label: value" pairs; values stay raw strings.
AttributeFieldNVMe
	= label:$[^:]+ ":" _ value:RestOfLine {
		return { label: label, value };
	}

@ -65,6 +65,8 @@ module.exports = {
return prepare(moduleRoot, options); return prepare(moduleRoot, options);
} }
}).then((result) => { }).then((result) => {
console.log("QUERY RESULT:", require("util").inspect(result, { colors: true, depth: null }));
let mergedOptions = Object.assign({}, options, result); let mergedOptions = Object.assign({}, options, result);
return renderComponent(componentAtPath(moduleRoot, componentPath), mergedOptions); return renderComponent(componentAtPath(moduleRoot, componentPath), mergedOptions);
}); });
@ -74,6 +76,6 @@ module.exports = {
} }
}); });
}).asCallback(callback); }).asCallback(callback);
} };
} }
}; };

@ -0,0 +1,22 @@
"use strict";
const findInTree = require("./");
// Ad-hoc manual test for find-in-tree: a two-root forest with nested children.
let tree = [{
	name: "a",
	children: [
		{ name: "a1" },
		{ name: "a2",
			children: [
				{ name: "a2a" },
				{ name: "a2b" },
				{ name: "a2c" }
			]
		}
	]
}, {
	name: "b"
}];

// Expected output: the { name: "a2", ... } subtree, then undefined for the miss.
console.log(findInTree({ tree, predicate: (item) => item.name === "a2" }));
console.log(findInTree({ tree, predicate: (item) => item.name === "nonexistent" }));

@ -0,0 +1,35 @@
"use strict";
const { validateOptions, required, isFunction, isString } = require("validatem");
const assureArray = require("assure-array");
const isIterable = require("is-iterable");
module.exports = function findInTree(options) {
validateOptions(arguments, {
tree: [ required ],
predicate: [ required, isFunction ],
childrenProperty: [ isString ],
});
let childrenProperty = options.childrenProperty ?? "children";
let topLevelItems = assureArray(options.tree);
let predicate = options.predicate;
function find(items) {
if (isIterable(items)) {
for (let item of items) {
if (predicate(item)) {
return item;
} else {
let childResult = find(item[childrenProperty]);
if (childResult !== undefined) {
return childResult;
}
}
}
}
}
return find(topLevelItems);
};

@ -0,0 +1,126 @@
"use strict";
const Promise = require("bluebird");
const objectFromEntries = require("object.fromentries");
const util = require("util");
function resolveFromDataSource(dataContext, dataSource, id) {
	// Looks up `id` through the named data source (anything with a .load
	// method, eg. a DataLoader) on the per-request data context; throws when
	// no such data source is registered.
	let source = dataContext[dataSource];

	if (source == null) {
		throw new Error(`Specified data source '${dataSource}' does not exist`);
	}

	return source.load(id);
}
function withProperty(dataSource, id, property) {
	// Shorthand for withData that resolves to a single property of the loaded value.
	return withData(dataSource, id, (loadedValue) => loadedValue[property]);
}
function withData(dataSource, id, callback) {
	// Produces a resolver (args, context) that loads `id` from the given data
	// source on context.data, then hands the loaded value to `callback`.
	return function (args, context) {
		return Promise.try(() => {
			return resolveFromDataSource(context.data, dataSource, id);
		}).then((value) => {
			if (value == null) {
				// QUESTION: Why do we disallow this again?
				throw new Error(`Got a null-ish value from data source '${dataSource}' for ID '${util.inspect(id)}'`);
			}

			// FIXME: Inject 'properties'
			return callback(value, args, context);
		});
	};
}
// Wraps a user-supplied Dynamic handler into a resolver. The handler receives
// an extended context with helpers for resolving sibling properties of the
// same data object (`object`), so computed properties can build on other ones.
function withDynamicHandler(handler, object) {
	return function (args, context) {
		let { data } = context;

		// Resolves a single property by invoking its resolver function on
		// `fromObject` (defaults to the data object this handler belongs to).
		function resolveProperty(property, fromObject = object) {
			if (typeof fromObject[property] !== "function") {
				throw new Error(`FIXME: Properties can apparently be non-functions`);
			}

			return fromObject[property](args, context);
		}

		let extendedContext = {
			... context,
			resolveProperty: resolveProperty,
			// Resolves several properties concurrently; yields { property: value }.
			resolveProperties: function (properties, fromObject) {
				return Promise.map(properties, (property) => {
					return Promise.try(() => {
						return resolveProperty(property, fromObject);
					}).then((value) => {
						return [ property, value ];
					});
				}).then((entries) => {
					return objectFromEntries(entries);
				});
			},
			// Walks a path of properties, resolving each on the previous result;
			// yields undefined when an intermediate value is null-ish.
			resolvePropertyPath: function (propertyPath, fromObject) {
				let initialObject = fromObject ?? object;

				return Promise.reduce(propertyPath, (last, property) => {
					if (last != null) {
						return resolveProperty(property, last);
					}
				}, initialObject);
			},
			// Direct access to a registered data source, bypassing the data object.
			resolveDataSource: function (dataSource, id) {
				return resolveFromDataSource(data, dataSource, id);
			}
		};

		return handler(args, extendedContext);
	};
}
// Marker symbols used as special keys in a createDataObject mapping:
// ID              - per-data-source: the ID to load from that source
// LocalProperties - static values copied onto the object as-is
// Dynamic         - computed properties backed by handler functions
let ID = Symbol("ID");
let LocalProperties = Symbol("LocalProperties");
let Dynamic = Symbol("Dynamic");

module.exports = {
	ID: ID,
	Dynamic: Dynamic,
	LocalProperties: LocalProperties,
	// Builds a resolver object from a mapping of data sources to property
	// specifications. String-keyed entries in `mappings` are data sources; each
	// must carry an [ID] entry, plus { property: sourcePropertyName | callback }
	// pairs (see withProperty / withData).
	createDataObject: function createDataObject(mappings) {
		let object = {};

		if (mappings[LocalProperties] != null) {
			Object.assign(object, mappings[LocalProperties]);
		}

		if (mappings[Dynamic] != null) {
			for (let [property, handler] of Object.entries(mappings[Dynamic])) {
				object[property] = withDynamicHandler(handler, object);
			}
		}

		// NOTE: Object.entries skips symbol keys, so the special entries handled
		// above are not iterated again here.
		for (let [dataSource, items] of Object.entries(mappings)) {
			if (items[ID] != null) {
				let id = items[ID];

				for (let [property, source] of Object.entries(items)) {
					// Local/dynamic handlers installed above take precedence; a
					// second definition for the same property is an error.
					if (object[property] == null) {
						if (typeof source === "string") {
							object[property] = withProperty(dataSource, id, source);
						} else if (typeof source === "function") {
							object[property] = withData(dataSource, id, source);
						} /* FIXME: else */
					} else {
						throw new Error(`Handler already defined for property '${property}' - maybe you specified it twice for different data sources?`);
					}
				}
			} else {
				throw new Error(`No object ID was provided for the '${dataSource}' data source`);
			}
		}

		return object;
	}
};

@ -0,0 +1,17 @@
"use strict";
const graphql = require("graphql");
module.exports = function createGraphQLInterface(schema, options, root) {
return function makeQuery(query, args) {
return graphql.graphql({
schema: schema,
source: query,
rootValue: root,
contextValue: {
data: (options.loaderFactory != null) ? options.loaderFactory() : {}
},
variableValues: args
});
};
};

@ -0,0 +1,9 @@
"use strict";
const objectFromEntries = require("object.fromentries");
module.exports = function itemsToObject(items) {
// Maps Array<{key, value}> to an Object<key, value>
let entries = items.map(({ key, value }) => [ key, value ]);
return objectFromEntries(entries);
};

@ -3,16 +3,46 @@
/* TODO: /* TODO:
toDisplay toDisplay
conversion between unit scales (eg. IEC -> metric bytes) conversion between unit scales (eg. IEC -> metric bytes)
ensure NaN is handled correctly
*/ */
const util = require("util"); const util = require("util");
const chalk = require("chalk"); const chalk = require("chalk");
const { validateArguments, required } = require("@validatem/core");
const arrayOf = require("@validatem/array-of");
const isString = require("@validatem/is-string");
const isNumber = require("@validatem/is-number");
const dynamic = require("@validatem/dynamic");
const anything = require("@validatem/anything");
const allowExtraProperties = require("@validatem/allow-extra-properties");
function capitalize(string) { function capitalize(string) {
return string[0].toUpperCase() + string.slice(1); return string[0].toUpperCase() + string.slice(1);
} }
module.exports = function makeUnits(unitSpecs) { module.exports = function makeUnits(_unitSpecs) {
let [ unitSpecs ] = validateArguments(arguments, {
unitSpecs: [
arrayOf([
{
unit: [ required, isString ],
toNext: [ isNumber ]
},
dynamic((_value, { arrayIndex, arrayLength }) => {
// FIXME: Actually test this
let isLast = (arrayIndex === arrayLength - 1);
if (isLast) {
return anything;
} else {
return allowExtraProperties({ toNext: [ required ] });
}
})
]),
]
});
let resultObject = {}; let resultObject = {};
unitSpecs.forEach((spec, i) => { unitSpecs.forEach((spec, i) => {
@ -54,7 +84,7 @@ module.exports = function makeUnits(unitSpecs) {
} }
} }
return createOfCurrentMagnitude; return createOfCurrentMagnitude();
} }
}; };
@ -91,4 +121,4 @@ module.exports = function makeUnits(unitSpecs) {
}); });
return resultObject; return resultObject;
}; };

@ -0,0 +1,15 @@
"use strict";
// FIXME: Finish this later
const { validateArguments, required, isString, isFunction } = require("validatem");
// Intended API (per the FIXME above): depth-first map over `tree`, applying
// `predicate` to every node, with `childrenProperty` selecting where child
// nodes live. Currently only validates its arguments and returns undefined —
// the actual mapping is not implemented yet.
module.exports = function mapTree(tree, predicate, childrenProperty) {
	validateArguments(arguments, [
		[ "tree", required ],
		[ "predicate", required, isFunction ],
		[ "childrenProperty", isString ]
	]);
};

@ -0,0 +1,25 @@
"use strict";
const { validateArguments, required } = require("@validatem/core");
const isString = require("@validatem/is-string");
const isRegularExpression = require("@validatem/is-regular-expression");
module.exports = function matchOrError(regex, string) {
validateArguments(arguments, [
[ "regex", required, isRegularExpression ],
[ "string", required, isString ]
]);
let match = regex.exec(string);
if (match == null) {
throw new Error(`Regular expression ${regex.toString()} failed to match on string: ${string}`);
} else {
// NOTE: Follows `execall` format: https://www.npmjs.com/package/execall
return {
match: match[0],
subMatches: match.slice(1),
index: match.index
};
}
};

@ -0,0 +1,9 @@
"use strict";
module.exports = function maybePrefix(prefix, text) {
if (text == null) {
return text;
} else {
return `${prefix} ${text}`;
}
};

@ -1,6 +1,6 @@
"use strict"; "use strict";
const {B, KiB, MiB, GiB, TiB, PiB, EiB} = require("../../units/bytes/iec"); const {B, KiB, MiB, GiB, TiB, PiB, EiB} = require("../unit-bytes-iec");
let unitMap = { let unitMap = {
b: B, b: B,
@ -21,7 +21,7 @@ function mapUnit(unitString) {
if (unitMap[normalizedUnitString] != null) { if (unitMap[normalizedUnitString] != null) {
return unitMap[normalizedUnitString]; return unitMap[normalizedUnitString];
} else { } else {
throw new Error(`Unknown unit: ${unit}`); throw new Error(`Unknown unit: ${unitString}`);
} }
} }
} }
@ -41,4 +41,4 @@ module.exports = function parseIECBytes(sizeString) {
return unitCreator(parseFloat(number)); return unitCreator(parseFloat(number));
} }
}; };

@ -1,9 +1,9 @@
"use strict"; "use strict";
const parseIECBytes = require("./bytes/iec"); const parseIECBytes = require("../parse-bytes-iec");
/* NOTE: This parsing module is for a special case; values that are normally handled using the Linux kernel's `memparse` function (which parses memory suffixes like K, M, G, etc.) but that do *not* semantically represent bytes. */ /* NOTE: This parsing module is for a special case; values that are normally handled using the Linux kernel's `memparse` function (which parses memory suffixes like K, M, G, etc.) but that do *not* semantically represent bytes. */
module.exports = function parseMemparseValue(value) { module.exports = function parseMemparseValue(value) {
return parseIECBytes(value).toB().amount; return parseIECBytes(value).toB().amount;
}; };

@ -1,13 +1,13 @@
"use strict"; "use strict";
const mapObj = require("map-obj"); const mapObj = require("map-obj");
const matchValue = require("match-value");
const {B, KiB} = require("../units/bytes/iec"); const {B, KiB} = require("../unit-bytes-iec");
const {minutes, seconds, microseconds} = require("../units/time"); const {minutes, seconds, microseconds} = require("../unit-time");
const mapValue = require("../map-value"); const parseOctalMode = require("../parse-octal-mode");
const parseOctalMode = require("./octal-mode"); const parseIECBytes = require("../parse-bytes-iec");
const parseIECBytes = require("./bytes/iec"); const parseMemparseValue = require("../parse-memparse-value");
const parseMemparseValue = require("./memparse");
const matchOrError = require("../match-or-error"); const matchOrError = require("../match-or-error");
let Value = (value) => value; let Value = (value) => value;
@ -17,7 +17,7 @@ let Include = Symbol("Include");
let All = Symbol("All"); let All = Symbol("All");
function MappedValue(mapping) { function MappedValue(mapping) {
return (value) => mapValue(value, mapping); return (value) => matchValue(value, mapping);
} }
let mountOptionMap = { let mountOptionMap = {
@ -368,7 +368,7 @@ let mountOptionMap = {
pagesize: { pageSize: (value) => parseIECBytes(value) }, pagesize: { pageSize: (value) => parseIECBytes(value) },
size: (value) => { size: (value) => {
if (value.includes("%")) { if (value.includes("%")) {
let [percentage] = matchOrError(/^([0-9]+(?:\.[0-9]+))%$/, value); let [ percentage ] = matchOrError(/^([0-9]+(?:\.[0-9]+))%$/, value).subMatches;
return { sizeAsPoolPercentage: parseFloat(percentage) }; return { sizeAsPoolPercentage: parseFloat(percentage) };
} else { } else {
return { size: parseIECBytes(value) }; return { size: parseIECBytes(value) };
@ -376,7 +376,7 @@ let mountOptionMap = {
}, },
min_size: (value) => { min_size: (value) => {
if (value.includes("%")) { if (value.includes("%")) {
let [percentage] = matchOrError(/^([0-9]+(?:\.[0-9]+))%$/, value); let [ percentage ] = matchOrError(/^([0-9]+(?:\.[0-9]+))%$/, value).subMatches;
return { minimumSizeAsPoolPercentage: parseFloat(percentage) }; return { minimumSizeAsPoolPercentage: parseFloat(percentage) };
} else { } else {
return { minimumSize: parseIECBytes(value) }; return { minimumSize: parseIECBytes(value) };
@ -396,6 +396,21 @@ let mountOptionMap = {
pstore: { pstore: {
/* TODO */ /* TODO */
}, },
fuse: {
/* TODO
http://man7.org/linux/man-pages/man8/mount.fuse.8.html
https://www.kernel.org/doc/Documentation/filesystems/fuse.txt
*/
user_id: { mountOwnerId: Value },
group_id: { mountGroupId: Value },
},
"fuse.sshfs": {
// TODO
[Include]: [ "fuse" ]
},
fusectl: {
// TODO
}
}; };
function optionsForFilesystem(filesystem) { function optionsForFilesystem(filesystem) {
@ -467,4 +482,4 @@ module.exports = function parseOptions(filesystem, optionString) {
}; };
} }
}, { parsed: {}, missing: [] }); }, { parsed: {}, missing: [] });
}; };

@ -1,5 +1,8 @@
"use strict"; "use strict";
const { validateArguments, required, isString } = require("validatem");
// FIXME: Length validation?
function parseModeDigit(modeDigit) { function parseModeDigit(modeDigit) {
let integer = parseInt(modeDigit); let integer = parseInt(modeDigit);
@ -39,24 +42,6 @@ function applyMask(target, mask) {
return (target & (~mask)); return (target & (~mask));
} }
module.exports = function parseModeString(modeString, { mask } = {}) {
let hasSpecialBits = (modeString.length === 4);
let modeDigits = intoDigits(modeString);
let maskDigits;
if (mask != null) {
maskDigits = intoDigits(mask);
} else {
maskDigits = [0, 0, 0, 0];
}
let maskedModeDigits = modeDigits.map((digit, i) => {
return applyMask(digit, maskDigits[i])
});
return mapModeDigits(maskedModeDigits, hasSpecialBits);
};
function intoDigits(modeString) { function intoDigits(modeString) {
let parsedDigits = modeString let parsedDigits = modeString
.split("") .split("")
@ -77,4 +62,29 @@ function intoDigits(modeString) {
} else { } else {
throw new Error(`Unrecognized mode string length: ${modeString}`); throw new Error(`Unrecognized mode string length: ${modeString}`);
} }
} }
module.exports = function parseModeString(modeString, { mask } = {}) {
validateArguments(arguments, [
[ "modeString", required, isString ],
[ "options", {
mask: isString
}]
]);
let hasSpecialBits = (modeString.length === 4);
let modeDigits = intoDigits(modeString);
let maskDigits;
if (mask != null) {
maskDigits = intoDigits(mask);
} else {
maskDigits = [0, 0, 0, 0];
}
let maskedModeDigits = modeDigits.map((digit, i) => {
return applyMask(digit, maskDigits[i]);
});
return mapModeDigits(maskedModeDigits, hasSpecialBits);
};

@ -0,0 +1,5 @@
"use strict";
module.exports = function shallowMerge(... objects) {
return Object.assign({}, ... objects);
};

@ -0,0 +1,10 @@
"use strict";
module.exports = function createJsonParser() {
return {
supportsStreams: false,
parse: function (text) {
return JSON.parse(text);
}
};
};

@ -0,0 +1,73 @@
"use strict";
const pegjs = require("pegjs");
const { validateOptions, either, required, isString, isPlainObject, allowExtraProperties } = require("validatem");
const fs = require("fs");
const moduleEval = require("eval");
const vm = require("vm");
const asExpression = require("as-expression");
const textParser = require("../text-parser");
// Builds a text-parser adapter from a PEG.js grammar, supplied either inline
// (`grammar`) or as a file path (`grammarFile`). `options` is passed through
// to pegjs.generate (eg. allowedStartRules).
module.exports = function createPegParser({ grammar, grammarFile, options }) {
	validateOptions(arguments, [
		{
			grammar: [ isString ],
			grammarFile: [ isString ],
			options: [ isPlainObject ]
		},
		// FIXME: require-either
		either(
			allowExtraProperties({ grammar: [ required ] }),
			allowExtraProperties({ grammarFile: [ required ] })
		)
	]);

	if (grammarFile != null) {
		// FIXME: cache
		grammar = fs.readFileSync(grammarFile, "utf8");
	}

	// Generate the parser as CommonJS source text (rather than a parser object)
	// so that we control how, and with which module context, it is evaluated.
	let parserCode = pegjs.generate(grammar, {
		... options,
		output: "source",
		format: "commonjs"
	});

	let parser = asExpression(() => {
		if (grammarFile != null) {
			// File-backed grammars are evaluated via the `eval` npm module, which
			// supplies a require() resolved relative to the grammar file — so
			// grammar initializers can use require().
			return moduleEval(parserCode, grammarFile, {}, true);
		} else {
			// Inline grammars run in a bare vm sandbox with a stub module system;
			// require() is deliberately unsupported there, as there is no file
			// path to resolve relative imports against.
			let exports_ = {};

			let sandbox = {
				exports: exports_,
				module: {
					exports: exports_,
				},
				require: function () {
					throw new Error("You cannot use require() when loading a grammar as a string; use the `grammarFile` option instead");
				}
			};

			// Strip a possible shebang line; vm.Script would reject it.
			let script = new vm.Script(parserCode.replace(/^\#\!.*/, ''));
			script.runInNewContext(sandbox);

			return sandbox.module.exports;
		}
	});

	return {
		supportsStreams: false,
		parse: function (text) {
			try {
				return parser.parse(text);
			} catch (error) {
				// PEG.js signals parse failures with error.name === "SyntaxError";
				// translate those into the text-parser NoResult contract, and let
				// anything else (genuine bugs) propagate unchanged.
				if (error.name === "SyntaxError") {
					throw textParser.NoResult.chain(error, "Parsing output failed");
				} else {
					throw error;
				}
			}
		}
	};
};

@ -0,0 +1,11 @@
"use strict";
const errorChain = require("error-chain");
module.exports = {
	// Applies a parser adapter (anything exposing a .parse method) to `text`.
	parse: function parseText(text, parser) {
		return parser.parse(text);
	},
	// FIXME: Force global implementation!
	// Thrown (chained) by parser adapters when input yields no usable result.
	NoResult: errorChain.create("NoResult")
};

@ -0,0 +1,11 @@
"use strict";

const path = require("path");
const fs = require("fs");
const parseText = require("./");
const createPegAdapter = require("./peg");

// The peg adapter takes an options object ({ grammar, grammarFile, options }),
// not a bare grammar string, and the text-parser module exposes its entry
// point as `.parse` rather than being callable itself.
let pegAdapter = createPegAdapter({
	grammar: fs.readFileSync(path.join(__dirname, "test.pegjs"), "utf8")
});

// NOTE(review): "hello mars" is not accepted by test.pegjs (only world/earth);
// presumably this is meant to exercise the parse-failure path — confirm.
console.log(parseText.parse("hello mars", pegAdapter));

@ -0,0 +1,13 @@
// Toy grammar used by test.js: a greeting followed by whitespace and a known
// location name; yields `{ location }`.
root
= "hello" _ location:location {
return { location };
}
// One or more tabs/spaces; result is discarded (returns undefined).
_
= ("\t" / " ")+ {
return undefined;
}
// Only these two literals are valid locations.
location
= "world"
/ "earth"

@ -0,0 +1,6 @@
## API
TODO
NOTE: `supportRebuild` can be disabled to prevent consuming code from passing the resulting list back into `rebuild` — e.g. when you are going to hand the consumer a filtered list, which would not rebuild correctly.
TODO: Maybe make this more abuse-proof by also exposing (and requiring) a `sequenceNumber` property and having the rebuild fail when it's not consecutive?

@ -0,0 +1,33 @@
"use strict";
const treecutter = require("./");
const util = require("util");
function log(value) {
console.log(util.inspect(value, { colors: true, depth: null }));
}
let tree = [{
name: "a",
children: [
{ name: "a1" },
{ name: "a2",
children: [
{ name: "a2a" },
{ name: "a2b" },
{ name: "a2c" }
]
}
]
}, {
name: "b"
}];
let flattened = treecutter.flatten(tree);
log(flattened);
let rebuilt = treecutter.rebuild(flattened);
log(rebuilt);

@ -0,0 +1,111 @@
"use strict";
const { validateArguments, required, isString, isArray, ValidationError } = require("validatem");
const assureArray = require("assure-array");
const shallowMerge = require("../shallow-merge");
// Creates a (stateful, single-use) validatem validator that checks that a
// value is a treecutter-generated flat list: every item must carry the
// `_treecutterDepth`/`_treecutterSequenceNumber` bookkeeping properties, and
// the sequence numbers must be consecutive (a gap means the list was
// filtered or reordered, and `rebuild` would produce a wrong tree).
//
// BUG FIX: the sequence-number check previously read
// `value._treecutterSequenceNumber` off the *array* itself, which is always
// undefined, so the consecutiveness check never triggered. It now inspects
// each item in turn.
function createListValidator() {
	let lastSequenceNumber = null;

	return function isTreecutterList(value) {
		isArray(value);

		for (let item of value) {
			if (item._treecutterDepth == null || item._treecutterSequenceNumber == null) {
				throw new ValidationError(`Must be a treecutter-generated list of items`);
			} else if (lastSequenceNumber != null && item._treecutterSequenceNumber !== lastSequenceNumber + 1) {
				throw new ValidationError(`Must be the original, unfiltered, unsorted treecutter-generated list of items`);
			} else {
				lastSequenceNumber = item._treecutterSequenceNumber;
			}
		}
	};
}
// Shared validatem schema for the `options` argument of both `flatten` and
// `rebuild`; only `childrenProperty` (the key under which child nodes are
// stored) is configurable.
let validateTreecutterOptions = {
	childrenProperty: isString
};
// Normalizes the user-supplied options object, filling in defaults.
// Uses `??` (not `||`) so that only null/undefined fall back to the default.
function defaultOptions(options = {}) {
	let childrenProperty = options.childrenProperty ?? "children";

	return { childrenProperty };
}
module.exports = {
	// Flattens a tree (or array of trees) into a flat array of *copies* of the
	// nodes, in depth-first pre-order. Each copy gains two bookkeeping
	// properties — `_treecutterDepth` (nesting level, 0 = root) and
	// `_treecutterSequenceNumber` (document order, starting at 0) — and has
	// its children property removed. `rebuild` relies on both.
	flatten: function (tree, options) {
		validateArguments(arguments, [
			[ "tree", required ],
			[ "options", validateTreecutterOptions ]
		]);
		let { childrenProperty } = defaultOptions(options);
		let rootItems = assureArray(tree);
		let list = [];
		let sequenceNumber = 0;
		// Recursive pre-order walk; `sequenceNumber` is shared across all
		// recursion levels, so it records global document order.
		function add(items, depth) {
			for (let item of items) {
				let listItem = shallowMerge(item, {
					_treecutterDepth: depth,
					_treecutterSequenceNumber: sequenceNumber
				});
				// listItem is a copy, so we can do this safely
				delete listItem[childrenProperty];
				list.push(listItem);
				sequenceNumber += 1;
				if (item[childrenProperty] != null) {
					add(item[childrenProperty], depth + 1);
				}
			}
		}
		add(rootItems, 0);
		return list;
	},
	// Inverse of `flatten`: reconstructs the tree from a flattened list using
	// each item's recorded `_treecutterDepth`. Returns an array of top-level
	// nodes built from copies (the input items are not mutated); the
	// bookkeeping properties are stripped from the copies.
	rebuild: function (list, options) {
		let isTreecutterList = createListValidator();
		validateArguments(arguments, [
			[ "list", required, isTreecutterList ],
			[ "options", validateTreecutterOptions ]
		]);
		let { childrenProperty } = defaultOptions(options);
		let topLevel = [];
		// stack[d] holds the most recently seen node at depth d; a new node at
		// depth > 0 is attached as a child of stack[depth - 1].
		let stack = [];
		let currentDepth = list[0]?._treecutterDepth;
		// NOTE(review): if the *first* item has depth > 0, `stack[depth - 1]`
		// below is undefined and this throws a TypeError instead of the
		// descriptive error — presumably such lists are rejected upstream; confirm.
		for (let item of list) {
			let depth = item._treecutterDepth;
			let treeItem = shallowMerge(item, {
				[childrenProperty]: []
			});
			// Again, we're operating on a copy.
			delete treeItem._treecutterDepth;
			delete treeItem._treecutterSequenceNumber;
			// A valid pre-order listing can only stay at the same depth, move
			// shallower, or go exactly one level deeper than the previous item.
			if (depth >= 0 && depth <= currentDepth + 1) {
				if (depth === 0) {
					topLevel.push(treeItem);
				} else {
					stack[depth - 1][childrenProperty].push(treeItem);
				}
				currentDepth = depth;
				stack[depth] = treeItem;
				stack.splice(depth + 1); // Remove references higher in the stack, to decrease the chance of a silent failure if there's a bug in the code
			} else {
				throw new Error(`Encountered an invalid item depth; the item's depth is ${depth}, but the current tree depth is ${currentDepth}; if this list was generated by treecutter, please file a bug!`);
			}
		}
		return topLevel;
	}
};

@ -1,6 +1,6 @@
"use strict"; "use strict";
const makeUnits = require("../../make-units"); const makeUnits = require("../make-units");
module.exports = makeUnits([ module.exports = makeUnits([
{unit: "B", toNext: 1024}, {unit: "B", toNext: 1024},
@ -10,4 +10,4 @@ module.exports = makeUnits([
{unit: "TiB", toNext: 1024}, {unit: "TiB", toNext: 1024},
{unit: "PiB", toNext: 1024}, {unit: "PiB", toNext: 1024},
{unit: "EiB"} {unit: "EiB"}
]); ]);

@ -10,4 +10,4 @@ module.exports = makeUnits([
{unit: "minutes", toNext: 60}, {unit: "minutes", toNext: 60},
{unit: "hours", toNext: 24}, {unit: "hours", toNext: 24},
{unit: "days"} {unit: "days"}
]); ]);

@ -0,0 +1,6 @@
"use strict";
// Marker for code paths that are supposed to be impossible; always throws.
function unreachable(reason) {
	// TODO: Parse the package name out of the stacktrace and include it in the error message?
	throw new Error(`${reason} -- this is a bug, please report it!`);
}

module.exports = unreachable;

@ -4,4 +4,4 @@ const snakeCase = require("snake-case");
module.exports = function upperSnakeCase(value) { module.exports = function upperSnakeCase(value) {
return snakeCase(value).toUpperCase(); return snakeCase(value).toUpperCase();
}; };

@ -1,9 +0,0 @@
"use strict";
module.exports = function prefixTitle(prefix, title) {
if (title == null) {
return title;
} else {
return `${prefix} ${title}`;
}
};

@ -2,52 +2,52 @@
const Promise = require("bluebird"); const Promise = require("bluebird");
const lsblk = require("../wrappers/lsblk"); // const lsblk = require("../packages/exec-lsblk");
const smartctl = require("../wrappers/smartctl"); // const smartctl = require("../packages/exec-smartctl");
const lvm = require("../wrappers/lvm"); // const lvm = require("../packages/exec-lvm");
const {B} = require("../units/bytes/iec"); // const {B} = require("../units/bytes/iec");
function getStorageDevices() { // function getStorageDevices() {
return Promise.try(() => { // return Promise.try(() => {
return lsblk(); // return lsblk();
}).filter((device) => { // }).filter((device) => {
/* FIXME: Move device type filter to GraphQL? */ // /* FIXME: Move device type filter to GraphQL? */
return (device.type === "disk"); // return (device.type === "disk");
}).map((device) => { // }).map((device) => {
return Object.assign({}, device, { // return Object.assign({}, device, {
path: `/dev/${device.name}` // path: `/dev/${device.name}`
}); // });
}).map((device) => { // }).map((device) => {
/* FIXME: Check whether we need to iterate through child disks as well, when dealing with eg. RAID arrays */ // /* FIXME: Check whether we need to iterate through child disks as well, when dealing with eg. RAID arrays */
return Promise.try(() => { // return Promise.try(() => {
return Promise.all([ // return Promise.all([
smartctl.info({ devicePath: device.path }), // smartctl.info({ devicePath: device.path }),
smartctl.attributes({ devicePath: device.path }) // smartctl.attributes({ devicePath: device.path })
]); // ]);
}).then(([info, attributes]) => { // }).then(([info, attributes]) => {
return Object.assign({}, device, { // return Object.assign({}, device, {
information: info, // information: info,
smartData: attributes, // smartData: attributes,
smartStatus: getSmartStatus(attributes) // smartStatus: getSmartStatus(attributes)
}); // });
}); // });
}).then((blockDevices) => { // }).then((blockDevices) => {
console.log(blockDevices); // console.log(blockDevices);
return blockDevices; // return blockDevices;
}); // });
} // }
function sumDriveSizes(drives) { // function sumDriveSizes(drives) {
return drives.reduce((total, device) => { // return drives.reduce((total, device) => {
return total + device.size.toB().amount; // return total + device.size.toB().amount;
}, 0); // }, 0);
} // }
function roundUnit(unit) { // function roundUnit(unit) {
return Object.assign(unit, { // return Object.assign(unit, {
amount: Math.round(unit.amount * 100) / 100 // amount: Math.round(unit.amount * 100) / 100
}); // });
} // }
module.exports = function({db}) { module.exports = function({db}) {
let router = require("express-promise-router")(); let router = require("express-promise-router")();

@ -237,10 +237,33 @@ type MountOptions {
codepage: Int codepage: Int
} }
enum MountType {
ROOT_MOUNT
SUBMOUNT
}
type Mount { type Mount {
path: String! mountpoint: String!
rawOptions: [RawMountOption] type: MountType!
options: MountOptions id: Int!
taskID: Int!
sourceDevice: BlockDevice!
rootPath: String
options: MountOptions!
filesystem: String!
label: String
uuid: String
partitionLabel: String
partitionUUID: String
deviceNumber: String
totalSpace: ByteSize
freeSpace: ByteSize
usedSpace: ByteSize
optionalFields: String
propagationFlags: String
children: [Mount!]!
# FIXME
# rawOptions: [RawMountOption]
} }
type SmartAttributeFlags { type SmartAttributeFlags {
@ -292,19 +315,22 @@ type BlockDevice {
name: String! name: String!
type: BlockDeviceType! type: BlockDeviceType!
path: String! path: String!
mountpoint: String mounts(type: MountType): [Mount!]!
# mountpoint: String
deviceNumber: String! deviceNumber: String!
removable: Boolean! removable: Boolean!
readOnly: Boolean! readOnly: Boolean!
size: ByteSize! size: ByteSize!
parent: BlockDevice
children: [BlockDevice!]! children: [BlockDevice!]!
# For tree linearization
_treecutterDepth: Int
_treecutterSequenceNumber: Int
} }
type PhysicalDrive { type PhysicalDrive {
path: String! path: String!
interface: String! interface: String!
blockDevice: BlockDevice! blockDevice: BlockDevice
allBlockDevices(type: BlockDeviceType): [BlockDevice!]! allBlockDevices(type: BlockDeviceType): [BlockDevice!]!
smartAvailable: Boolean! smartAvailable: Boolean!
smartEnabled: Boolean smartEnabled: Boolean
@ -359,4 +385,4 @@ type ResourcesQuery {
type Query { type Query {
hardware: HardwareQuery! hardware: HardwareQuery!
resources: ResourcesQuery! resources: ResourcesQuery!
} }

@ -108,6 +108,10 @@ table {
} }
table.drives { table.drives {
td {
vertical-align: top;
}
td.smart { td.smart {
&.HEALTHY { &.HEALTHY {
background-color: rgb(0, 165, 0); background-color: rgb(0, 165, 0);
@ -162,4 +166,13 @@ table.drives {
color: rgb(194, 0, 0); color: rgb(194, 0, 0);
} }
} }
} }
.stacktrace {
white-space: pre-wrap;
font-family: monospace;
.irrelevant {
color: gray;
}
}

@ -3,10 +3,11 @@
const Promise = require("bluebird"); const Promise = require("bluebird");
const util = require("util"); const util = require("util");
const lsblk = require("./wrappers/lsblk"); const lsblk = require("./packages/exec-lsblk");
const lvm = require("./wrappers/lvm"); const lvm = require("./packages/exec-lvm");
const smartctl = require("./wrappers/smartctl"); const smartctl = require("./packages/exec-smartctl");
const findmnt = require("./wrappers/findmnt"); const findmnt = require("./packages/exec-findmnt");
const nvmeCli = require("./packages/exec-nvme-cli");
return Promise.try(() => { return Promise.try(() => {
// return lvm.getVersions(); // return lvm.getVersions();
@ -25,10 +26,12 @@ return Promise.try(() => {
// return lvm.addVolumeToVolumeGroup({ volumeGroup: "vg-name", physicalVolume: "/dev/loop1" }); // return lvm.addVolumeToVolumeGroup({ volumeGroup: "vg-name", physicalVolume: "/dev/loop1" });
// return lvm.destroyPhysicalVolume({ devicePath: "/dev/loop0" }); // return lvm.destroyPhysicalVolume({ devicePath: "/dev/loop0" });
// return lsblk(); // return lsblk();
// return smartctl.scan();
// return smartctl.info({ devicePath: "/dev/sda" }) // return smartctl.info({ devicePath: "/dev/sda" })
// return smartctl.info({ devicePath: process.argv[2] }) // return smartctl.info({ devicePath: process.argv[2] })
// return smartctl.attributes({ devicePath: process.argv[2] }) // return smartctl.attributes({ devicePath: process.argv[2] });
return findmnt(); return findmnt();
// return nvmeCli.listNamespaces({ devicePath: "/dev/nvme0" });
}).then((result) => { }).then((result) => {
console.log(util.inspect(result, {colors: true, depth: null})); console.log(util.inspect(result, {colors: true, depth: null}));
}).catch((err) => { }).catch((err) => {

@ -0,0 +1,8 @@
"use strict";
const matchOrError = require("../packages/match-or-error");
module.exports = function deviceNameFromPath(path) {
let [ name ] = matchOrError(/^\/dev\/(.+)$/, path).subMatches;
return name;
};

@ -7,7 +7,7 @@ const fs = Promise.promisifyAll(require("fs-extra"));
const path = require("path"); const path = require("path");
const endOfStreamAsync = Promise.promisify(require("end-of-stream")); const endOfStreamAsync = Promise.promisify(require("end-of-stream"));
const progressIndicator = require("./tasks/progress-indicator"); const progressIndicator = require("../tasks/progress-indicator");
module.exports = function createImageStore(storagePath) { module.exports = function createImageStore(storagePath) {
function getPath(id) { function getPath(id) {

@ -1,31 +1,20 @@
'use strict'; 'use strict';
const joi = require("joi"); const isString = require("@validatem/is-string");
const required = require("@validatem/required");
const dynamic = require("@validatem/dynamic");
const when = require("@validatem/when");
// const checkit = require("checkit"); module.exports = dynamic((object) => ({
// const oneOf = require("../../validators/one-of"); name: [ required, isString ],
description: [ isString ],
module.exports = joi.object({ source: [ required, isString ],
name: joi.string().required(), url: [
description: joi.string(), isString,
source: joi.string().required(), when(() => object.source === "http", [ required ])
url: joi.when("source", { is: "http", then: joi.string().required() }), ],
path: joi.when("source", { is: "local", then: joi.string().required() }) path: [
}); isString,
when(() => object.source === "local", [ required ])
// module.exports = checkit({ ]
// name: "string", }));
// description: "string",
// source: ["required", "string", oneOf([
// "local",
// "http"
// ])]
// }).maybe({
// url: ["required", "string"]
// }, (input) => {
// return (input.source === "http");
// }).maybe({
// path: ["required", "string"]
// }, (input) => {
// return (input.source === "local");
// });

@ -3,8 +3,8 @@
const React = require("react"); const React = require("react");
const classnames = require("classnames"); const classnames = require("classnames");
const {LocalsContext} = require("../../express-async-react"); const {LocalsContext} = require("../../packages/express-async-react");
const isUnderPrefix = require("../../is-under-prefix"); const isUnderPrefix = require("../../util/is-under-prefix");
module.exports = function MenuItem({ path, children }) { module.exports = function MenuItem({ path, children }) {
let {currentPath} = React.useContext(LocalsContext); let {currentPath} = React.useContext(LocalsContext);
@ -17,4 +17,4 @@ module.exports = function MenuItem({ path, children }) {
</a> </a>
</div> </div>
); );
}; };

@ -1,17 +1,31 @@
"use strict"; "use strict";
const React = require("react"); const React = require("react");
const entities = require("entities");
const Layout = require("./layout"); const Layout = require("./layout");
module.exports = { module.exports = {
template: function ErrorPage({ error }) { template: function ErrorPage({ error }) {
let escapedStack = entities.escape(error.stack);
let formattedStack = escapedStack
.split("\n")
.map((line) => {
if (line.includes("node_modules")) {
return `<span class="irrelevant">${line}</span>`;
} else {
return line;
}
})
.join("\n");
return ( return (
<Layout title="An error occurred"> <Layout title="An error occurred">
<div className="error"> <div className="error">
<h1>An error occurred.</h1> <h1>An error occurred.</h1>
<h2>{ error.message }</h2> <h2>{ error.message }</h2>
<pre>{ error.stack }</pre> <div className="stacktrace" dangerouslySetInnerHTML={{ __html: formattedStack }} />
</div> </div>
</Layout> </Layout>
); );

@ -5,7 +5,7 @@ const React = require("react");
const MainLayout = require("../layout"); const MainLayout = require("../layout");
const MenuItem = require("../components/menu-item"); const MenuItem = require("../components/menu-item");
const prefixTitle = require("../../prefix-title"); const prefixTitle = require("../../packages/maybe-prefix");
function Submenu() { function Submenu() {
return (<> return (<>
@ -21,4 +21,4 @@ module.exports = function HardwareLayout({ children, title }) {
{children} {children}
</MainLayout> </MainLayout>
); );
}; };

@ -2,20 +2,50 @@
const React = require("react"); const React = require("react");
const classnames = require("classnames"); const classnames = require("classnames");
const gql = require("../../../packages/graphql-interface/tag");
const Layout = require("../layout"); const Layout = require("../layout");
const gql = require("../../../graphql/tag");
function Indented({ depth, children }) {
return (
<div style={{ paddingLeft: depth * 10 }}>
{children}
</div>
);
}
function MountEntry({ mount }) {
return <div className="mountpoint">{mount.mountpoint}</div>;
}
function PartitionEntry({partition, isLast}) { function PartitionEntry({partition, isLast}) {
function PartitionIndent({ children }) {
return (
<Indented depth={partition._treecutterDepth}>
{children}
</Indented>
);
}
return ( return (
<tr className={classnames("partition", {last: isLast})}> <tr className={classnames("partition", {last: isLast})}>
<td>{partition.name}</td> <td>
<td>{partition.size.toString()}</td> <PartitionIndent>
{partition.name}
</PartitionIndent>
</td>
<td>
<PartitionIndent>
{partition.size.toString()}
</PartitionIndent>
</td>
<td colSpan={5}> <td colSpan={5}>
{(partition.mountpoint != null) <PartitionIndent>
? partition.mountpoint {(partition.mounts.length > 0)
: <span className="notMounted">(not mounted)</span> ? partition.mounts.map((mount) => <MountEntry mount={mount} />)
} : <span className="notMounted">(not mounted)</span>
}
</PartitionIndent>
</td> </td>
</tr> </tr>
); );
@ -27,9 +57,14 @@ function DriveEntry({drive}) {
return (<> return (<>
<tr className={classnames({hasPartitions})}> <tr className={classnames({hasPartitions})}>
<td className={classnames("smart", drive.smartHealth)} rowSpan={1 + drive.partitions.length} /> <td className={classnames("smart", drive.smartHealth)} rowSpan={1 + drive.partitions.length} />
<td>{drive.blockDevice.name}</td> <td>{drive.path}</td>
<td>{drive.size.toDisplay(2).toString()}</td> <td>{drive.size.toDisplay(2).toString()}</td>
<td>{drive.rpm} RPM</td> <td>
{(drive.rpm != null)
? `${drive.rpm} RPM`
: null
}
</td>
<td>{drive.serialNumber}</td> <td>{drive.serialNumber}</td>
<td>{drive.model}</td> <td>{drive.model}</td>
<td>{drive.modelFamily}</td> <td>{drive.modelFamily}</td>
@ -48,6 +83,7 @@ module.exports = {
query { query {
hardware { hardware {
drives { drives {
path
smartHealth smartHealth
size size
rpm rpm
@ -60,10 +96,16 @@ module.exports = {
name name
} }
partitions: allBlockDevices(type: PARTITION) { partitions: allBlockDevices {
_treecutterDepth
_treecutterSequenceNumber
name name
mountpoint
size size
mounts {
mountpoint
}
} }
} }
} }
@ -88,4 +130,4 @@ module.exports = {
</Layout> </Layout>
); );
} }
}; };

@ -1,51 +0,0 @@
"use strict";
const Promise = require("bluebird");
const execBinary = require("../exec-binary");
const parseIECBytes = require("../parse/bytes/iec");
const mapValue = require("../map-value");
function parseBoolean(value) {
return mapValue(value, {
0: false,
1: true
});
}
function mapType(value) {
return mapValue(value, {
part: "partition",
disk: "disk",
loop: "loopDevice"
});
}
function mapDeviceList(devices) {
return devices.map((device) => {
return {
name: device.name,
type: mapType(device.type),
mountpoint: device.mountpoint,
deviceNumber: device["maj:min"],
removable: parseBoolean(device.rm),
readOnly: parseBoolean(device.ro),
size: parseIECBytes(device.size),
children: (device.children != null) ? mapDeviceList(device.children) : []
};
})
}
module.exports = function lsblk() {
return Promise.try(() => {
return execBinary("lsblk")
.withFlags({ json: true })
.singleResult()
.expectJsonStdout((result) => {
return mapDeviceList(result.blockdevices);
})
.execute();
}).then((output) => {
return output.result;
});
};

@ -1,159 +0,0 @@
"use strict";
const Promise = require("bluebird");
const execBinary = require("../exec-binary");
const {B} = require("../units/bytes/iec");
const matchOrError = require("../match-or-error");
const errors = require("../errors");
const mapValue = require("../map-value");
/* FIXME: Error handling, eg. device not found errors */
// Decodes a smartctl attribute-flag string ("0x..." hex) into named booleans.
// The two high bits of the low byte are reserved; seeing them set means the
// output format is not what we expect, so bail out loudly.
function mapAttributeFlags(flagString) {
	let rawFlags = Buffer.from(flagString.slice(2), "hex").readUInt16BE(0);

	if ((rawFlags & 128) || (rawFlags & 64)) {
		throw new Error(`Encountered unknown flag byte in flag ${flagString}`);
	}

	let hasFlag = (bit) => Boolean(rawFlags & bit);

	return {
		autoKeep: hasFlag(32),
		eventCount: hasFlag(16),
		errorRate: hasFlag(8),
		affectsPerformance: hasFlag(4),
		updatedOnline: hasFlag(2),
		indicatesFailure: hasFlag(1),
	};
}
module.exports = {
attributes: function ({ devicePath }) {
return Promise.try(() => {
return execBinary("smartctl", [devicePath])
.asRoot()
.withFlags({ attributes: true })
.singleResult()
.expectStdout("attributes", /^\s*([0-9]+)\s+([a-zA-Z_-]+)\s+(0x[0-9a-f]{4})\s+([0-9]{3})\s+([0-9]{3})\s+([0-9]{3})\s+(Pre-fail|Old_age)\s+(Always|Offline)\s+(FAILING_NOW|In_the_past|-)\s+(.+)$/gm, {
required: true,
matchAll: true,
result: ([id, attributeName, flags, value, worst, threshold, type, updatedWhen, failedWhen, rawValue]) => {
return {
id: parseInt(id),
name: attributeName,
flags: mapAttributeFlags(flags),
value: parseInt(value),
rawValue: rawValue,
worstValueSeen: parseInt(worst),
failureThreshold: parseInt(threshold),
type: mapValue(type, {
"Pre-fail": "preFail",
"Old_age": "oldAge"
}),
failingNow: (failedWhen === "FAILING_NOW"),
/* TODO: Should the below include the FAILING_NOW state? */
failedBefore: (failedWhen === "In_the_past"),
updatedWhen: mapValue(updatedWhen, {
"Always": "always",
"Offline": "offline"
})
};
}
})
.execute();
}).then((output) => {
return output.result;
});
},
info: function ({ devicePath }) {
return Promise.try(() => {
return execBinary("smartctl", [devicePath])
.asRoot()
.withFlags({ info: true })
.expectStdout("smartAvailable", /^SMART support is:\s*(Available|Unavailable|Ambiguous).+$/m, {
result: ([availability]) => {
return mapValue(availability, {
Available: true,
Unavailable: false,
Ambiguous: null
});
}
})
.expectStdout("model", /^Device Model:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("modelFamily", /^Model Family:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("serialNumber", /^Serial Number:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("wwn", /^LU WWN Device Id:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("firmwareVersion", /^Firmware Version:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("size", /^User Capacity:\s*(.+)$/m, {
result: ([value]) => {
try {
let match = matchOrError(/^([0-9,]+) bytes \[[^\]]+\]$/, value);
return B(parseInt(match[0].replace(/,/g, "")));
} catch (error) {
throw errors.UnexpectedOutput.chain(error, "Could not parse drive capacity", { input: value });
}
}
})
.expectStdout("rpm", /^Rotation Rate:\s*(.+)$/m, {
result: ([value]) => {
try {
let match = matchOrError(/^([0-9]+) rpm$/, value);
return parseInt(match[0]);
} catch (error) {
throw errors.UnexpectedOutput.chain(error, "Could not parse drive RPM", { input: value });
}
}
})
.expectStdout("sectorSizes", /^Sector Sizes:\s*(.+)$/m, {
result: ([value]) => {
try {
let match = matchOrError(/^([0-9]+) bytes logical, ([0-9]+) bytes physical$/, value);
return {
logical: B(parseInt(match[0])),
physical: B(parseInt(match[1]))
};
} catch (error) {
throw errors.UnexpectedOutput.chain(error, "Could not parse drive sector sizes", { input: value });
}
}
})
.expectStdout("formFactor", /^Form Factor:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("ataVersion", /^ATA Version is:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("sataVersion", /^SATA Version is:\s*(.+)$/m, { result: ([value]) => value })
.expectStdout("smartEnabled", /^SMART support is:\s*(Enabled|Disabled)$/m, {
result: ([value]) => {
return mapValue(value, {
Enabled: true,
Disabled: false
});
}
})
.execute();
}).then((output) => {
return output.result;
});
},
scan: function () {
return Promise.try(() => {
return execBinary("smartctl")
.asRoot()
.withFlags({ scan: true })
.singleResult()
.expectStdout("devices", /^([^ ]+) -d ([^ ]+) #.+$/gm, {
matchAll: true,
result: ([devicePath, interface_]) => {
return {
path: devicePath,
interface: interface_
};
}
})
.execute();
}).then((output) => {
return output.result;
});
}
};

@ -0,0 +1,58 @@
"use strict";
const Promise = require("bluebird");
const path = require("path");
const execBinary = require("../exec-binary");
const createPegParser = require("../text-parser-pegjs");
const itemsToObject = require("../../packages/items-to-object");
/* FIXME: Error handling, eg. device not found errors */
// Builds a parser for smartctl's stdout, restricted to a single start rule of
// the shared grammar in parser.pegjs.
function outputParser(rootRule) {
	let grammarFile = path.join(__dirname, "./parser.pegjs");

	return createPegParser({
		grammarFile: grammarFile,
		options: {
			allowedStartRules: [ rootRule ]
		}
	});
}
module.exports = {
attributes: function ({ devicePath }) {
return Promise.try(() => {
return execBinary("smartctl", [devicePath])
.asRoot()
.withFlags({ attributes: true })
.requireOnStdout(outputParser("RootAttributes"))
.execute();
}).then((output) => {
// NOTE: Ignore the header, for now
return output.result.attributes;
});
},
info: function ({ devicePath }) {
return Promise.try(() => {
return execBinary("smartctl", [devicePath])
.asRoot()
.withFlags({ info: true })
.requireOnStdout(outputParser("RootInfo"))
.execute();
}).then((output) => {
// NOTE: Ignore the header, for now
return itemsToObject(output.result.fields);
});
},
scan: function () {
return Promise.try(() => {
return execBinary("smartctl")
.asRoot()
.withFlags({ scan: true })
.requireOnStdout(outputParser("RootScan"))
.execute();
}).then((output) => {
// NOTE: Ignore the header, for now
return output.result.devices;
});
}
};

File diff suppressed because it is too large Load Diff
Loading…
Cancel
Save