Initial commit

Sven Slootweg, 7 months ago
Branch: master
Commit: aed19b8171
65 changed files (lines added):

    3  .eslintrc
    3  .gitignore
   55  notes.txt
   48  package.json
   80  roadmap/test-dsl/index.js
   18  roadmap/test-dsl/migrations/1.js
   21  roadmap/test-dsl/migrations/2.js
   80  roadmap/test-schema-roundtrip.js
   49  shell.nix
   13  src/backend/lmdb.js
  475  src/data/timezone-names.js
    8  src/dev/print-bits.js
    7  src/generate-id.js
   56  src/index.js
  112  src/intersect-many-lists.js
   19  src/lookup-timezone-name.js
   67  src/packages/arithmetic-coder-bitmask/index.js
   83  src/packages/arithmetic-coder/index.js
   50  src/packages/immutable-deep-merge/index.js
   15  src/packages/named-cartesian-product/index.js
  150  src/packages/orderable-varint/index.js
   52  src/packages/tape-snapshot/index.js
  161  src/query/methods/v1.js
   21  src/schema/compute-transform.js
   97  src/schema/methods/v1.js
  367  src/schema/reducer.js
   77  src/schema/rules.js
    9  src/storage-encoder/bigint/abs.js
   22  src/storage-encoder/bigint/buffer.js
   12  src/storage-encoder/bitwise/count-bits.js
    9  src/storage-encoder/bitwise/count-bytes.js
    5  src/storage-encoder/bitwise/generate-mask.js
    7  src/storage-encoder/bitwise/invert-1byte.js
   11  src/storage-encoder/bitwise/invert.js
    8  src/storage-encoder/bitwise/truncate-left-bits.js
   25  src/storage-encoder/coders/README.md
   33  src/storage-encoder/coders/boolean.js
   64  src/storage-encoder/coders/bytes.js
   95  src/storage-encoder/coders/date.js
   57  src/storage-encoder/coders/decimal.js
   95  src/storage-encoder/coders/duration.js
   58  src/storage-encoder/coders/index.js
   22  src/storage-encoder/coders/integer.js
   37  src/storage-encoder/coders/string.js
   79  src/storage-encoder/encode-field.js
   45  src/storage-encoder/get-value-type.js
  100  src/storage-encoder/record-coder.js
    9  src/storage-encoder/type-adapters/from-date.js
   34  src/storage-encoder/type-adapters/from-temporal.js
   10  src/storage-encoder/value-checks/is-negative.js
    9  src/storage-encoder/value-checks/number/is-negative.js
    5  src/storage-encoder/value-checks/string-number/is-decimal.js
    5  src/storage-encoder/value-checks/string-number/is-integer.js
    5  src/storage-encoder/value-checks/string-number/is-negative.js
   12  src/storage-encoder/value-checks/temporal/is-compatible-time.js
   13  src/storage-encoder/value-checks/temporal/is-duration.js
    7  src/util/compose.js
  BIN  tests/storage-encoder/_snapshots/boolean-encoding-snapshot-1.cbor
    1  tests/storage-encoder/_snapshots/date-encoding-snapshot-1.cbor
  BIN  tests/storage-encoder/_snapshots/integer-encoding-snapshot-1.cbor
   83  tests/storage-encoder/storage-encoder.js
   69  tests/storage-encoder/type-adapters/dates.js
    2  tikv-config.toml
 5000  yarn.lock
  127  zapdb-roadmap.txt

3
.eslintrc

@@ -0,0 +1,3 @@
{
"extends": "@joepie91/eslint-config"
}

3
.gitignore

@@ -0,0 +1,3 @@
node_modules
local_cluster
junk

55
notes.txt

@@ -0,0 +1,55 @@
# Date
(columns: field, distinct values, rounded to power of two, bits needed)
bitmask                          7 bits
year                 8192        13 bits
month        12      16          4 bits
day          31      32          5 bits
hour         24      32          5 bits
minute       60      64          6 bits
second       61      64          6 bits
millisecond  1000    1024        10 bits
-----------
56 bits = 7 bytes
# Duration
(columns: field, distinct values, rounded to power of two, bits needed)
bitmask                          7 bits
sign         1       1           1 bit
years                4096        12 bits
months       12      16          4 bits
days         31      32          5 bits
hours        24      32          5 bits
minutes      60      64          6 bits
seconds      61      64          6 bits
milliseconds 1000    1024        10 bits
-----------
56 bits = 7 bytes
NOTE:
- It must be possible to evaluate schema migrations statelessly; that is, with zero knowledge of what data *currently* exists in the database, a sequence of migrations up to any point must *always* result in a valid schema that:
- does not allow for data to exist in the database which violates the schema's constraints (eg. missing required fields)
- allows full rollback to any previous point in history, with data loss only being permitted in that process if it is fundamentally unavoidable due to the nature of the migration (eg. rolling back the addition of a new field)
- for *any* sequence of migrate-to and rollback operations within the same set of linear migrations, continues to uphold the above two properties
- Make sure that a column default can be specified separately for new vs. migrated rows - in some cases, the user may want to initialize existing rows with a value derived from that row (eg. to emulate application insertion logic) rather than with the usual column default.
- If both a regular and migration default is specified: use either for its relevant purpose
- If only a migration default is specified: use that for migration, and disallow NULL values in new records
- If only a regular default is specified: use that for both cases
- If neither is specified: this is an error in a changeFields, but allowed in an addFields, and just means NULL values in new records are disallowed
- For the regular default, default functions *do not* receive the previous value; if the user wants to use this, they should specify a migration default
- A migration default only applies for *that specific migration step*, not for any migrations after it, even if the same field is affected. This needs to be specifically ensured to avoid bugs.
- When applying arithmetic directly to integer-encoded decimal numbers, magnitude scaling may be needed, because multiplication compounds the scale factor; for example, with a 10x scale factor (see the sketch after this list):
1.1 * 1.2 = 1.32 (the true result)
11 * 12 = 132 (WRONG: both inputs carry a 10x scale, so the raw product carries a 100x scale, and 132 would decode as 13.2)
(11 * 12) / 10 = 13, rounded down from 13.2 (decodes as 1.3, which is CORRECT, even though some precision is lost to conform to the storage precision)
- For user-specified reversal operations in migrations, automatically do a test with some random values to detect errors?
- Make sure to version the DSL import; so that old migrations can continue using older versions of the DSL! At least until there is some kind of codemod mechanism for this.
- Should be some way to 'inherit' an instance from the base database connection, allowing for configuring things like type adapters - this would let the user choose whether to eg. define custom type adapters globally or only for a specific table or such. Need to figure out how this fits into the DSL design where queries are stateless by default. Maybe a custom filter hook that lets the user semi-declaratively specify what queries to apply custom adapters to, or so?
- unsafeForbidRollback must make rollbacks impossible even in hot reload mode; although in *some* cases there might be a default value that could be reset to, it is possible for fields to exist that absolutely require an application-provided value. Therefore, it is not consistently possible to rollback even in a controllably-unsafe manner, when no rollback operation is specified.
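
A minimal sketch of the scale-correction idea described above, assuming integer-encoded decimals with a fixed 10x scale factor (names are illustrative, not part of the codebase):

const SCALE = 10n;

function multiplyScaled(a, b) {
    // Both inputs carry one SCALE factor, so the raw product carries two;
    // divide one out. BigInt division truncates, which is where the
    // storage-precision rounding happens.
    return (a * b) / SCALE;
}

multiplyScaled(11n, 12n); // 13n, i.e. 1.3 in decimal representation
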
Query planning:
- Make list of all 'queried fields', ie. fields which are used to filter or order
- If the first sorting criterion is also a filtering field *and* there is an index for that field, it should be selected as the first index to select from, because then we can implicitly use the order from the index (see the sketch after this list)
- Otherwise: apply filters, and if the remaining result set is more than __% of the full collection, and the sorting criterion has an index, reorder the result set according to that index; if not, do a regular sort on the retrieved-and-decoded record data instead
- Any descending sorts should come *before* any record-fetching filters/criteria, so that it doesn't have to reverse a full result set in memory
- Sorting criteria should be internally rearranged as-needed, to prefer sorting by indexed fields with high cardinality (ie. many different values) first and low cardinality last
- Possible optimization: if the filtered subset appears to comprise most of the table, do a sequential filtering scan of the table instead of retrieving each matched item individually? This might be more efficient for some backends. Maybe backends should be able to configure whether this is the case for them?
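
A rough sketch of the first-index selection heuristic above; all names here are hypothetical, and `indexes` is assumed to map field names to index handles:

function pickDrivingIndex(filterFields, sortCriteria, indexes) {
    let firstSort = sortCriteria[0];
    if (firstSort != null && filterFields.includes(firstSort.field) && indexes.has(firstSort.field)) {
        // Scanning this index yields results in the desired order for free
        return { index: indexes.get(firstSort.field), preSorted: true };
    } else {
        // Otherwise, drive the query from any indexed filter field and sort afterwards
        let indexedField = filterFields.find((field) => indexes.has(field));
        return (indexedField != null)
            ? { index: indexes.get(indexedField), preSorted: false }
            : { index: null, preSorted: false };
    }
}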

48
package.json

@@ -0,0 +1,48 @@
{
"name": "zapdb-kv",
"version": "1.0.0",
"main": "src/index.js",
"repository": "git@git.cryto.net:joepie91/zapdb-kv.git",
"author": "Sven Slootweg <admin@cryto.net>",
"license": "WTFPL OR CC0-1.0",
"scripts": {
"server": "concurrently --kill-others -p '[{name}]' -n 'node,PD' -c 'bgBlue.bold,bgRed.bold' './start-server.sh' './start-pd.sh'",
"test": "tape tests/**/*.js | tap-difflet -p"
},
"devDependencies": {
"@joepie91/eslint-config": "^1.1.0",
"cbor": "^8.1.0",
"concurrently": "^6.2.1",
"enzyme": "^3.11.0",
"eslint": "^8.9.0",
"filled-array": "^2.2.0",
"snapshotter": "^3.0.1",
"tap": "^15.1.5",
"tap-difflet": "^0.7.2",
"tape": "^5.3.2"
},
"dependencies": {
"@extra-bigint/log2": "^0.0.53",
"@joepie91/unreachable": "^1.0.0",
"@js-temporal/polyfill": "^0.3.0",
"as-expression": "^1.0.0",
"assure-array": "^1.0.0",
"big-varint": "^0.1.0",
"bigint-buffer": "^1.1.5",
"cartesian-product": "^2.1.2",
"default-value": "^1.0.0",
"fix-esm": "^1.0.1",
"lmdb": "^1.6.6",
"match-value": "^1.1.0",
"merge-by-template": "^0.1.3",
"seed-random": "^2.2.0",
"split-filter-n": "^1.1.3",
"syncpipe": "^1.0.0",
"time-call": "^0.1.0",
"unicode-collation-algorithm2": "^0.3.1",
"varint": "^6.0.0"
},
"snapshotter": {
"snapshotPath": "./tests/_snapshots"
}
}

80
roadmap/test-dsl/index.js

@@ -0,0 +1,80 @@
"use strict";
let dummyMigrations = [
{ id: 1, operations: [
{ type: "createCollection", name: "users", operations: [
{ type: "createField", name: "username", fieldType: "string", required: true, attributes: {} },
{ type: "createField", name: "passwordHash", fieldType: "string", required: true, attributes: {} },
{ type: "createField", name: "emailAddress", fieldType: "string", required: false, attributes: {} },
{ type: "createField", name: "isActive", fieldType: "boolean", required: true, attributes: {} },
{ type: "createField", name: "registrationDate", fieldType: "date", required: true, attributes: { withTimezone: false }},
{ type: "createField", name: "invitesLeft", fieldType: "integer", required: true, attributes: {} },
]}
]},
{ id: 2, operations: [
{ type: "modifyCollection", name: "users", operations: [
{ type: "setFieldAttributes", name: "emailAddress", required: false, attributes: {} },
{ type: "setFieldAttributes", name: "isActive", required: true, attributes: {} },
{ type: "setFieldAttributes", name: "registrationDate", attributes: { withTimezone: true }},
{ type: "setFieldAttributes", name: "invitesLeft", attributes: { signed: false }},
]}
]},
];
let dummyItems = [{
username: "joepie91",
passwordHash: "foo",
emailAddress: "admin@cryto.net",
isActive: true,
registrationDate: new Date(),
invitesLeft: 100
}, {
username: "test",
passwordHash: "bar",
emailAddress: "test@cryto.net",
isActive: false,
registrationDate: new Date(),
invitesLeft: 0
}];
const reduceSchema = require("../../src/schema/reducer");
const createRecordCoder = require("../../src/storage-encoder/record-coder");
let schema = reduceSchema(dummyMigrations);
let orderedTableSchema = Object.entries(schema.tables.users.fields)
.map(([ name, settings ]) => {
let { fieldType, ... rest } = settings;
return { name, type: fieldType, ... rest };
})
.sort((a, b) => {
if (b.name < a.name) {
return 1;
} else if (b.name > a.name) {
return -1;
} else {
return 0;
}
});
let tableEncoder = createRecordCoder(orderedTableSchema);
let encodedItems = dummyItems.map((item) => tableEncoder.encode(item));
let decodedItems = encodedItems.map((item) => tableEncoder.decode(item.record));
console.log(tableEncoder);
console.log("# Schema:");
console.dir(schema, { depth: null });
console.log("# Input items:");
console.dir(dummyItems, { depth: null });
console.log("# Encoded items:");
console.dir(encodedItems, { depth: null });
console.log("# Decoded items:");
console.dir(decodedItems, { depth: null });
// MARKER: Auxiliary blob handling, somehow
// MARKER: Implement support for optional fields in record-coder; including encoding the presence mask into the encoded records

18
roadmap/test-dsl/migrations/1.js

@@ -0,0 +1,18 @@
"use strict";
const { required, string, integer, boolean, date, optional, defaultTo, addCollection, addFields, addIndex } = require("../../../src/schema/methods/v1");
module.exports = [
addCollection("users", [
addFields({
username: [ required, string ],
passwordHash: [ required, string ],
emailAddress: [ optional, string ],
invitesLeft: [ required, integer, defaultTo(0) ],
isActive: [ required, boolean, defaultTo(false) ],
registrationDate: [ required, date, defaultTo(() => new Date()) ]
}),
addIndex("username"),
addIndex("registrationDate"),
])
];

21
roadmap/test-dsl/migrations/2.js

@@ -0,0 +1,21 @@
"use strict";
const { changeCollection, addFields, changeFields, addIndex, decimal, required, defaultExistingTo, precision, defaultTo, withTimezone, unsigned } = require("../../../src/schema/methods/v1");
module.exports = [
changeCollection("users", [
addFields({
karmaScore: [ required, decimal, precision(4), defaultTo(0) ]
}),
changeFields({
// A migration default is mandatory when making a field `required` in a `changeFields`
emailAddress: [ required, defaultExistingTo("INVALID@example.com") ],
// TODO: To detect bugs early, disallow no-op changes in schema modifications?
isActive: [ required ],
// TODO: Changing to *without* a timezone should require an explicit allowDestructive modifier, as it would require normalizing all dates to UTC, losing the original timezone information in the process. Or maybe an `unsafe` wrapper? Like `unsafe(withoutTimezone)` or `destructive(withoutTimezone)`, but only for *modification* cases
registrationDate: [ withTimezone ],
invitesLeft: [ unsigned ]
}),
addIndex("karmaScore")
])
];

80
roadmap/test-schema-roundtrip.js

@@ -0,0 +1,80 @@
"use strict";
let dummyMigrations = [
{ id: 1, operations: [
{ type: "createCollection", name: "users", operations: [
{ type: "createField", name: "username", fieldType: "string", required: true, attributes: {} },
{ type: "createField", name: "passwordHash", fieldType: "string", required: true, attributes: {} },
{ type: "createField", name: "emailAddress", fieldType: "string", required: false, attributes: {} },
{ type: "createField", name: "isActive", fieldType: "boolean", required: true, attributes: {} },
{ type: "createField", name: "registrationDate", fieldType: "date", required: true, attributes: { withTimezone: false }},
{ type: "createField", name: "invitesLeft", fieldType: "integer", required: true, attributes: {} },
]}
]},
{ id: 2, operations: [
{ type: "modifyCollection", name: "users", operations: [
{ type: "setFieldAttributes", name: "emailAddress", required: false, attributes: {} },
{ type: "setFieldAttributes", name: "isActive", required: true, attributes: {} },
{ type: "setFieldAttributes", name: "registrationDate", attributes: { withTimezone: true }},
{ type: "setFieldAttributes", name: "invitesLeft", attributes: { signed: false }},
]}
]},
];
let dummyItems = [{
username: "joepie91",
passwordHash: "foo",
emailAddress: "admin@cryto.net",
isActive: true,
registrationDate: new Date(),
invitesLeft: 100
}, {
username: "test",
passwordHash: "bar",
emailAddress: "test@cryto.net",
isActive: false,
registrationDate: new Date(),
invitesLeft: 0
}];
const reduceSchema = require("../src/schema/reducer");
const createRecordCoder = require("../src/storage-encoder/record-coder");
let schema = reduceSchema(dummyMigrations);
let orderedTableSchema = Object.entries(schema.tables.users.fields)
.map(([ name, settings ]) => {
let { fieldType, ... rest } = settings;
return { name, type: fieldType, ... rest };
})
.sort((a, b) => {
if (b.name < a.name) {
return 1;
} else if (b.name > a.name) {
return -1;
} else {
return 0;
}
});
let tableEncoder = createRecordCoder(orderedTableSchema);
let encodedItems = dummyItems.map((item) => tableEncoder.encode(item));
let decodedItems = encodedItems.map((item) => tableEncoder.decode(item.record));
console.log(tableEncoder);
console.log("# Schema:");
console.dir(schema, { depth: null });
console.log("# Input items:");
console.dir(dummyItems, { depth: null });
console.log("# Encoded items:");
console.dir(encodedItems, { depth: null });
console.log("# Decoded items:");
console.dir(decodedItems, { depth: null });
// MARKER: Auxiliary blob handling, somehow
// MARKER: Implement support for optional fields in record-coder; including encoding the presence mask into the encoded records

49
shell.nix

@@ -0,0 +1,49 @@
{ pkgs ? import <nixpkgs> {} }:
with pkgs;
let
version = "5.1.1";
os = "linux";
architecture = "amd64";
binaryPackage = meta: stdenv.mkDerivation ({
phases = "unpackPhase installPhase fixupPhase";
installPhase = ''
mkdir -p $out/bin
cp -r * $out/bin/
'';
sourceRoot = ".";
nativeBuildInputs = [ autoPatchelfHook ];
} // meta);
serverPackage = binaryPackage {
name = "tikv-server-${version}";
src = fetchurl {
url = "https://tiup-mirrors.pingcap.com/tikv-v${version}-${os}-${architecture}.tar.gz";
sha256 = "0sl6bhy7irvk48pss2bmmnl4yflxkpi8kfl8hg09bk7a8dqjqfcy";
};
};
pdPackage = binaryPackage {
name = "tikv-pd-${version}";
src = fetchurl {
url = "https://tiup-mirrors.pingcap.com/pd-v${version}-${os}-${architecture}.tar.gz";
sha256 = "1mzkbnid4kzxysnnkngvdqxfxvdcm718j248181zax1rl0x313ps";
};
};
ctlPackage = binaryPackage {
name = "tikv-ctl-${version}";
src = fetchurl {
url = "https://tiup-mirrors.pingcap.com/ctl-v${version}-${os}-${architecture}.tar.gz";
sha256 = "0g8wkqqyi8zvh3zfslyzf0c1nijw7maqlp99lrfw6vql4k3wn6b1";
};
};
in stdenv.mkDerivation rec {
name = "zapdb-kv-env";
buildInputs = [
serverPackage
pdPackage
ctlPackage
];
}

13
src/backend/lmdb.js

@@ -0,0 +1,13 @@
"use strict";
const lmdb = require("lmdb");
function prefixSearch(db, prefix) {
	// TODO: Not implemented yet; should iterate over all keys in `db` that start with `prefix` (parameter names are placeholders)
}
module.exports = function createLMDBBackend() {
return {
};
};

475
src/data/timezone-names.js

@@ -0,0 +1,475 @@
"use strict";
// DO NOT reorder this list! It is used to determine the internal ID for each timezone name, and changing the order will break parsing!
module.exports = [
"Africa/Abidjan",
"Africa/Accra",
"Africa/Addis_Ababa",
"Africa/Algiers",
"Africa/Asmara",
"Africa/Bamako",
"Africa/Bangui",
"Africa/Banjul",
"Africa/Bissau",
"Africa/Blantyre",
"Africa/Brazzaville",
"Africa/Bujumbura",
"Africa/Cairo",
"Africa/Casablanca",
"Africa/Ceuta",
"Africa/Conakry",
"Africa/Dakar",
"Africa/Dar_es_Salaam",
"Africa/Djibouti",
"Africa/Douala",
"Africa/El_Aaiun",
"Africa/Freetown",
"Africa/Gaborone",
"Africa/Harare",
"Africa/Johannesburg",
"Africa/Juba",
"Africa/Kampala",
"Africa/Khartoum",
"Africa/Kigali",
"Africa/Kinshasa",
"Africa/Lagos",
"Africa/Libreville",
"Africa/Lome",
"Africa/Luanda",
"Africa/Lubumbashi",
"Africa/Lusaka",
"Africa/Malabo",
"Africa/Maputo",
"Africa/Maseru",
"Africa/Mbabane",
"Africa/Mogadishu",
"Africa/Monrovia",
"Africa/Nairobi",
"Africa/Ndjamena",
"Africa/Niamey",
"Africa/Nouakchott",
"Africa/Ouagadougou",
"Africa/Porto-Novo",
"Africa/Sao_Tome",
"Africa/Timbuktu",
"Africa/Tripoli",
"Africa/Tunis",
"Africa/Windhoek",
"America/Adak",
"America/Anchorage",
"America/Anguilla",
"America/Antigua",
"America/Araguaina",
"America/Argentina/Buenos_Aires",
"America/Argentina/Catamarca",
"America/Argentina/ComodRivadavia",
"America/Argentina/Cordoba",
"America/Argentina/Jujuy",
"America/Argentina/La_Rioja",
"America/Argentina/Mendoza",
"America/Argentina/Rio_Gallegos",
"America/Argentina/Salta",
"America/Argentina/San_Juan",
"America/Argentina/San_Luis",
"America/Argentina/Tucuman",
"America/Argentina/Ushuaia",
"America/Aruba",
"America/Asuncion",
"America/Atikokan",
"America/Bahia",
"America/Bahia_Banderas",
"America/Barbados",
"America/Belem",
"America/Belize",
"America/Blanc-Sablon",
"America/Boa_Vista",
"America/Bogota",
"America/Boise",
"America/Cambridge_Bay",
"America/Campo_Grande",
"America/Cancun",
"America/Caracas",
"America/Cayenne",
"America/Cayman",
"America/Chicago",
"America/Chihuahua",
"America/Coral_Harbour",
"America/Costa_Rica",
"America/Creston",
"America/Cuiaba",
"America/Curacao",
"America/Danmarkshavn",
"America/Dawson",
"America/Dawson_Creek",
"America/Denver",
"America/Detroit",
"America/Dominica",
"America/Edmonton",
"America/Eirunepe",
"America/El_Salvador",
"America/Ensenada",
"America/Fortaleza",
"America/Fort_Nelson",
"America/Glace_Bay",
"America/Goose_Bay",
"America/Grand_Turk",
"America/Grenada",
"America/Guadeloupe",
"America/Guatemala",
"America/Guayaquil",
"America/Guyana",
"America/Halifax",
"America/Havana",
"America/Hermosillo",
"America/Indiana/Indianapolis",
"America/Indiana/Knox",
"America/Indiana/Marengo",
"America/Indiana/Petersburg",
"America/Indiana/Tell_City",
"America/Indiana/Vevay",
"America/Indiana/Vincennes",
"America/Indiana/Winamac",
"America/Inuvik",
"America/Iqaluit",
"America/Jamaica",
"America/Juneau",
"America/Kentucky/Louisville",
"America/Kentucky/Monticello",
"America/La_Paz",
"America/Lima",
"America/Los_Angeles",
"America/Maceio",
"America/Managua",
"America/Manaus",
"America/Martinique",
"America/Matamoros",
"America/Mazatlan",
"America/Menominee",
"America/Merida",
"America/Metlakatla",
"America/Mexico_City",
"America/Miquelon",
"America/Moncton",
"America/Monterrey",
"America/Montevideo",
"America/Montreal",
"America/Montserrat",
"America/Nassau",
"America/New_York",
"America/Nipigon",
"America/Nome",
"America/Noronha",
"America/North_Dakota/Beulah",
"America/North_Dakota/Center",
"America/North_Dakota/New_Salem",
"America/Nuuk",
"America/Ojinaga",
"America/Panama",
"America/Pangnirtung",
"America/Paramaribo",
"America/Phoenix",
"America/Port-au-Prince",
"America/Port_of_Spain",
"America/Porto_Velho",
"America/Puerto_Rico",
"America/Punta_Arenas",
"America/Rainy_River",
"America/Rankin_Inlet",
"America/Recife",
"America/Regina",
"America/Resolute",
"America/Rio_Branco",
"America/Rosario",
"America/Santarem",
"America/Santiago",
"America/Santo_Domingo",
"America/Sao_Paulo",
"America/Scoresbysund",
"America/Sitka",
"America/St_Johns",
"America/St_Kitts",
"America/St_Lucia",
"America/St_Thomas",
"America/St_Vincent",
"America/Swift_Current",
"America/Tegucigalpa",
"America/Thule",
"America/Thunder_Bay",
"America/Tijuana",
"America/Toronto",
"America/Tortola",
"America/Vancouver",
"America/Whitehorse",
"America/Winnipeg",
"America/Yakutat",
"America/Yellowknife",
"Antarctica/Casey",
"Antarctica/Davis",
"Antarctica/DumontDUrville",
"Antarctica/Macquarie",
"Antarctica/Mawson",
"Antarctica/McMurdo",
"Antarctica/Palmer",
"Antarctica/Rothera",
"Antarctica/Syowa",
"Antarctica/Troll",
"Antarctica/Vostok",
"Asia/Aden",
"Asia/Almaty",
"Asia/Amman",
"Asia/Anadyr",
"Asia/Aqtau",
"Asia/Aqtobe",
"Asia/Ashgabat",
"Asia/Atyrau",
"Asia/Baghdad",
"Asia/Bahrain",
"Asia/Baku",
"Asia/Bangkok",
"Asia/Barnaul",
"Asia/Beirut",
"Asia/Bishkek",
"Asia/Brunei",
"Asia/Chita",
"Asia/Choibalsan",
"Asia/Chongqing",
"Asia/Colombo",
"Asia/Damascus",
"Asia/Dhaka",
"Asia/Dili",
"Asia/Dubai",
"Asia/Dushanbe",
"Asia/Famagusta",
"Asia/Gaza",
"Asia/Hanoi",
"Asia/Harbin",
"Asia/Hebron",
"Asia/Ho_Chi_Minh",
"Asia/Hong_Kong",
"Asia/Hovd",
"Asia/Irkutsk",
"Asia/Jakarta",
"Asia/Jayapura",
"Asia/Jerusalem",
"Asia/Kabul",
"Asia/Kamchatka",
"Asia/Karachi",
"Asia/Kashgar",
"Asia/Kathmandu",
"Asia/Khandyga",
"Asia/Kolkata",
"Asia/Krasnoyarsk",
"Asia/Kuala_Lumpur",
"Asia/Kuching",
"Asia/Kuwait",
"Asia/Macau",
"Asia/Magadan",
"Asia/Makassar",
"Asia/Manila",
"Asia/Muscat",
"Asia/Nicosia",
"Asia/Novokuznetsk",
"Asia/Novosibirsk",
"Asia/Omsk",
"Asia/Oral",
"Asia/Phnom_Penh",
"Asia/Pontianak",
"Asia/Pyongyang",
"Asia/Qatar",
"Asia/Qostanay",
"Asia/Qyzylorda",
"Asia/Riyadh",
"Asia/Sakhalin",
"Asia/Samarkand",
"Asia/Seoul",
"Asia/Shanghai",
"Asia/Singapore",
"Asia/Srednekolymsk",
"Asia/Taipei",
"Asia/Tashkent",
"Asia/Tbilisi",
"Asia/Tehran",
"Asia/Tel_Aviv",
"Asia/Thimphu",
"Asia/Tokyo",
"Asia/Tomsk",
"Asia/Ulaanbaatar",
"Asia/Urumqi",
"Asia/Ust-Nera",
"Asia/Vientiane",
"Asia/Vladivostok",
"Asia/Yakutsk",
"Asia/Yangon",
"Asia/Yekaterinburg",
"Asia/Yerevan",
"Atlantic/Azores",
"Atlantic/Bermuda",
"Atlantic/Canary",
"Atlantic/Cape_Verde",
"Atlantic/Faroe",
"Atlantic/Jan_Mayen",
"Atlantic/Madeira",
"Atlantic/Reykjavik",
"Atlantic/South_Georgia",
"Atlantic/Stanley",
"Atlantic/St_Helena",
"Australia/Adelaide",
"Australia/Brisbane",
"Australia/Broken_Hill",
"Australia/Currie",
"Australia/Darwin",
"Australia/Eucla",
"Australia/Hobart",
"Australia/Lindeman",
"Australia/Lord_Howe",
"Australia/Melbourne",
"Australia/Perth",
"Australia/Sydney",
"CET",
"CST6CDT",
"EET",
"EST",
"EST5EDT",
"Etc/GMT",
"Etc/GMT+1",
"Etc/GMT-1",
"Etc/GMT+10",
"Etc/GMT-10",
"Etc/GMT+11",
"Etc/GMT-11",
"Etc/GMT+12",
"Etc/GMT-12",
"Etc/GMT-13",
"Etc/GMT-14",
"Etc/GMT+2",
"Etc/GMT-2",
"Etc/GMT+3",
"Etc/GMT-3",
"Etc/GMT+4",
"Etc/GMT-4",
"Etc/GMT+5",
"Etc/GMT-5",
"Etc/GMT+6",
"Etc/GMT-6",
"Etc/GMT+7",
"Etc/GMT-7",
"Etc/GMT+8",
"Etc/GMT-8",
"Etc/GMT+9",
"Etc/GMT-9",
"Etc/UTC",
"Europe/Amsterdam",
"Europe/Andorra",
"Europe/Astrakhan",
"Europe/Athens",
"Europe/Belfast",
"Europe/Belgrade",
"Europe/Berlin",
"Europe/Brussels",
"Europe/Bucharest",
"Europe/Budapest",
"Europe/Chisinau",
"Europe/Copenhagen",
"Europe/Dublin",
"Europe/Gibraltar",
"Europe/Guernsey",
"Europe/Helsinki",
"Europe/Isle_of_Man",
"Europe/Istanbul",
"Europe/Jersey",
"Europe/Kaliningrad",
"Europe/Kiev",
"Europe/Kirov",
"Europe/Lisbon",
"Europe/Ljubljana",
"Europe/London",
"Europe/Luxembourg",
"Europe/Madrid",
"Europe/Malta",
"Europe/Minsk",
"Europe/Monaco",
"Europe/Moscow",
"Europe/Oslo",
"Europe/Paris",
"Europe/Prague",
"Europe/Riga",
"Europe/Rome",
"Europe/Samara",
"Europe/Sarajevo",
"Europe/Saratov",
"Europe/Simferopol",
"Europe/Skopje",
"Europe/Sofia",
"Europe/Stockholm",
"Europe/Tallinn",
"Europe/Tirane",
"Europe/Tiraspol",
"Europe/Ulyanovsk",
"Europe/Uzhgorod",
"Europe/Vaduz",
"Europe/Vienna",
"Europe/Vilnius",
"Europe/Volgograd",
"Europe/Warsaw",
"Europe/Zagreb",
"Europe/Zaporozhye",
"Europe/Zurich",
"Factory",
"HST",
"Indian/Antananarivo",
"Indian/Chagos",
"Indian/Christmas",
"Indian/Cocos",
"Indian/Comoro",
"Indian/Kerguelen",
"Indian/Mahe",
"Indian/Maldives",
"Indian/Mauritius",
"Indian/Mayotte",
"Indian/Reunion",
"MET",
"MST",
"MST7MDT",
"Pacific/Apia",
"Pacific/Auckland",
"Pacific/Bougainville",
"Pacific/Chatham",
"Pacific/Chuuk",
"Pacific/Easter",
"Pacific/Efate",
"Pacific/Enderbury",
"Pacific/Fakaofo",
"Pacific/Fiji",
"Pacific/Funafuti",
"Pacific/Galapagos",
"Pacific/Gambier",
"Pacific/Guadalcanal",
"Pacific/Guam",
"Pacific/Honolulu",
"Pacific/Johnston",
"Pacific/Kiritimati",
"Pacific/Kosrae",
"Pacific/Kwajalein",
"Pacific/Majuro",
"Pacific/Marquesas",
"Pacific/Midway",
"Pacific/Nauru",
"Pacific/Niue",
"Pacific/Norfolk",
"Pacific/Noumea",
"Pacific/Pago_Pago",
"Pacific/Palau",
"Pacific/Pitcairn",
"Pacific/Pohnpei",
"Pacific/Port_Moresby",
"Pacific/Rarotonga",
"Pacific/Saipan",
"Pacific/Tahiti",
"Pacific/Tarawa",
"Pacific/Tongatapu",
"Pacific/Wake",
"Pacific/Wallis",
"PST8PDT",
"WET",
];

8
src/dev/print-bits.js

@@ -0,0 +1,8 @@
"use strict";
module.exports = function printBits(value) {
// FIXME: Verify that this also works for unsigned values!
let bits = BigInt(value).toString(2);
let padSize = Math.ceil(bits.length / 8) * 8;
return bits.padStart(padSize, "0");
};
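
A quick usage sketch for the helper above:

const printBits = require("./src/dev/print-bits");

printBits(5);   // "00000101"
printBits(300); // "0000000100101100"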

7
src/generate-id.js

@@ -0,0 +1,7 @@
"use strict";
const crypto = require("crypto");
module.exports = function generateID() {
return crypto.randomBytes(12);
};

56
src/index.js

@@ -0,0 +1,56 @@
"use strict";
const unreachable = require("@joepie91/unreachable")("zapdb");
const matchValue = require("match-value");
// const createLMDBBackend = require("./backend/lmdb");
const queryBuilder = require("./query-builder");
// TODO: Type decoding hook for eg. turning decimal strings into bigints or numbers
Object.assign(module.exports, queryBuilder);
function findNodeType(nodes, type) {
let nodeIndex = nodes.findIndex((node) => node.type === type);
if (nodeIndex !== -1) { // NOTE: findIndex returns -1, not null, when nothing matches
return nodes[nodeIndex];
} else {
throw new Error(`Failed to locate expected '${type}' node`);
}
}
let temporaryHardcodedSchema = [
{ name: "_id", type: "bytes", required: true },
{ name: "username", type: "string", required: true },
{ name: "email", type: "string", required: true },
{ name: "activated", type: "boolean", required: true },
{ name: "notes", type: "string", required: false }
];
module.exports.createClient = function(options) {
let { schema, backend } = options;
function insertQuery(query, parameters) {
let items = findNodeType(query.clauses, "items").items;
console.log(items);
}
function selectQuery(query, parameters) {}
function updateQuery(query, parameters) {}
function deleteQuery(query, parameters) {}
return {
query: function (query, parameters) {
matchValue(query.type, {
insert: () => insertQuery(query, parameters),
update: () => updateQuery(query, parameters),
delete: () => deleteQuery(query, parameters),
select: () => selectQuery(query, parameters),
});
},
transaction: function (callback) {
}
};
};

112
src/intersect-many-lists.js

@@ -0,0 +1,112 @@
"use strict";
const timeCall = require("time-call");
const syncpipe = require("syncpipe");
// let list1 = [ 2, 9, 10, 12, 13, 16, 19, 21, 23, 24, 33, 43, 46, 48, 49, 58, 60, 61, 69, 71, 74, 75, 78, 79, 80, 82, 85, 86, 88, 90, 91, 92, 95, 98, 99 ];
// let list2 = [ 3, 4, 5, 9, 10, 12, 13, 14, 16, 17, 19, 23, 24, 28, 29, 31, 32, 34, 37, 41, 42, 44, 45, 48, 50, 51, 52, 55, 56, 64, 69, 75, 77, 79, 85, 87, 91, 92, 93, 94, 98 ];
// let list3 = [ 2, 5, 8, 9, 15, 23, 27, 31, 32, 33, 34, 36, 37, 40, 43, 45, 53, 54, 56, 58, 60, 63, 64, 66, 71, 72, 74, 75, 78, 84, 89, 91, 94, 96, 97, 98, 99 ];
function randomIntegers(count, limit) {
return syncpipe(new Array(count), [
_ => _.fill(0),
_ => _.map(() => Math.ceil(Math.random() * limit)),
_ => new Set(_),
_ => Array.from(_),
_ => _.sort((a, b) => a - b)
]);
}
let list1 = randomIntegers(1000, 2000);
let list2 = randomIntegers(1000, 2000);
let list3 = randomIntegers(1000, 2000);
console.log(list1);
function intersectThree(list1, list2, list3) {
let pointer1 = 0;
let pointer2 = 0;
let pointer3 = 0;
let results = [];
while (pointer1 < list1.length && pointer2 < list2.length && pointer3 < list3.length) {
let value1 = list1[pointer1];
let value2 = list2[pointer2];
let value3 = list3[pointer3];
if (value1 === value2 && value1 === value3) {
results.push(value1);
pointer1++;
pointer2++;
pointer3++;
} else {
let lowest = Math.min(value1, value2, value3);
if (value1 === lowest) { pointer1++; }
if (value2 === lowest) { pointer2++; }
if (value3 === lowest) { pointer3++; }
}
}
return results;
}
function intersectSets(list1, list2, list3) {
let set2 = new Set(list2);
let set3 = new Set(list3);
return list1.filter((value) => set2.has(value) && set3.has(value));
}
function intersectSets2(list1, list2, list3) {
let set2 = new Set(list2);
let set3 = new Set(list3);
let results = [];
for (let value of list1) {
if (set2.has(value) && set3.has(value)) {
results.push(value);
}
}
return results;
}
function tryOut(ITERATIONS) {
console.log(`# ${ITERATIONS} iterations, time is per iteration`);
let result1 = timeCall(() => {
for (let i = 0; i < ITERATIONS; i++) {
intersectThree(list1, list2, list3);
}
});
let result2 = timeCall(() => {
for (let i = 0; i < ITERATIONS; i++) {
intersectSets(list1, list2, list3);
}
});
let result3 = timeCall(() => {
for (let i = 0; i < ITERATIONS; i++) {
intersectSets2(list1, list2, list3);
}
});
console.log({
pointer: result1.time / ITERATIONS / 1e3 + "us",
setsFilter: result2.time / ITERATIONS / 1e3 + "us",
setsFor: result3.time / ITERATIONS / 1e3 + "us"
});
}
tryOut(100);
tryOut(1000);
tryOut(10000);
tryOut(100000);
// console.log(intersectThree(list1, list2, list3));

19
src/lookup-timezone-name.js

@@ -0,0 +1,19 @@
"use strict";
const syncpipe = require("syncpipe");
const timezoneNames = require("./data/timezone-names");
let inverseMapping = syncpipe(timezoneNames, [
_ => _.map((name, i) => [ name, i ]),
_ => new Map(_)
]);
module.exports = function lookupTimezoneName(name) {
if (inverseMapping.has(name)) {
return inverseMapping.get(name);
} else {
// FIXME: Error type, clearer instructions for end users since this may also happen when our timezone list is outdated
throw new Error(`Unknown timezone name: ${name}`);
}
};

67
src/packages/arithmetic-coder-bitmask/index.js

@@ -0,0 +1,67 @@
"use strict";
const createArithmeticCoder = require("../arithmetic-coder");
function encodeBooleans(booleans) {
let n = 1n;
let bitmask = 0n;
for (let boolean of booleans) {
if (boolean === true) {
bitmask |= n;
}
n *= 2n;
}
return bitmask;
}
function decodeBooleans(bitmask, count) {
let n = 1n;
let booleans = [];
for (let i = 0; i < count; i++) {
booleans.push((bitmask & n) !== 0n);
n *= 2n;
}
return booleans;
}
module.exports = function createBitmaskArithmeticCoder(fields) {
// NOTE: We *always* store the bitmask as the very first field, to ensure that it doesn't interfere with binary sorting order
let fieldCount = BigInt(fields.length);
let maximumBitmaskValue = 2n ** fieldCount; // NOTE: Exclusive
let coder = createArithmeticCoder([
{ name: "__bitmask", minimum: 0, maximum: maximumBitmaskValue },
... fields
]);
return {
bits: coder.bits,
encode: function (data) {
let fieldPresence = fields.map((field) => data[field.name] != null);
return coder.encode({
... data,
__bitmask: encodeBooleans(fieldPresence)
});
},
decode: function (data) {
let decoded = coder.decode(data);
let fieldPresence = decodeBooleans(decoded.__bitmask, fields.length);
fields.forEach((field, i) => {
if (fieldPresence[i] === false) {
decoded[field.name] = undefined;
}
});
delete decoded.__bitmask;
return decoded;
}
};
};
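
A minimal round-trip sketch for the module above, with made-up field definitions:

const createBitmaskArithmeticCoder = require("./src/packages/arithmetic-coder-bitmask");

let coder = createBitmaskArithmeticCoder([
    { name: "hour", minimum: 0, maximum: 24 },
    { name: "minute", minimum: 0, maximum: 60 }
]);

let encoded = coder.encode({ hour: 13, minute: null }); // minute is absent
coder.decode(encoded); // { hour: 13n, minute: undefined }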

83
src/packages/arithmetic-coder/index.js

@@ -0,0 +1,83 @@
"use strict";
const assert = require("assert");
const bigintLog2 = require("@extra-bigint/log2");
function bitsNeeded(value) {
if (value === 0n) { // NOTE: must compare against 0n, since the input is a BigInt
return 1n;
} else {
return bigintLog2(value) + 1n;
}
}
function remainderDivide(number, divideBy) {
let remainder = number % divideBy;
let wholes = (number - remainder) / divideBy;
return [ wholes, remainder ];
}
module.exports = function createArithmeticCoder(fields) {
// NOTE: The fields are order-sensitive! A new field can only be added to the definition later without breaking decoding of existing values if it is placed at the *end*. Ranges of existing fields should never be changed, as this will break decoding.
// NOTE: Minimum is inclusive, maximum is exclusive
// NOTE: For binary sortability, the fields should be ordered from least to most significant
// second, ..., day, ... year, mask, timezone
let nextMultiplier = 1n;
let processedFields = fields.map((field) => {
let minimum = BigInt(field.minimum);
let maximum = BigInt(field.maximum);
let range = maximum - minimum;
let processed = {
offset: minimum,
range: range,
minimum: minimum,
maximum: maximum,
multiplier: nextMultiplier,
name: field.name
};
nextMultiplier = nextMultiplier * range;
return processed;
});
let maximumValue = nextMultiplier;
let reverseFields = processedFields.slice().reverse();
return {
bits: bitsNeeded(maximumValue - 1n),
encode: function (data) {
let number = processedFields.reduce((total, field) => {
let value = data[field.name];
if (value != null) {
let valueN = BigInt(value);
assert(valueN >= field.minimum && valueN < field.maximum);
let normalized = valueN - field.offset;
return total + (normalized * field.multiplier);
} else {
// Effectively store a 0, and assume that the calling code deals with any requiredness constraints and understands how to handle this case
return total;
}
}, 0n);
return number;
},
decode: function (number) {
let result = {};
for (let field of reverseFields) {
let [ wholes, remainder ] = remainderDivide(number, field.multiplier);
number = remainder;
result[field.name] = wholes + field.offset;
}
return result;
}
};
};
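
A small usage sketch of the mixed-radix coder above, packing two made-up fields into one integer (least significant field first, per the sortability note):

const createArithmeticCoder = require("./src/packages/arithmetic-coder");

let coder = createArithmeticCoder([
    { name: "minute", minimum: 0, maximum: 60 },
    { name: "hour", minimum: 0, maximum: 24 }
]);

coder.bits; // 11n, since 60 * 24 distinct values need 11 bits
let n = coder.encode({ minute: 5, hour: 13 }); // 13n * 60n + 5n = 785n
coder.decode(n); // { hour: 13n, minute: 5n }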

50
src/packages/immutable-deep-merge/index.js

@@ -0,0 +1,50 @@
"use strict";
module.exports = function immutableDeepMerge(object1, object2) {
let hasChanges = false;
let changedProperties = { };
let deletedProperties = [];
if (object2 != null) {
for (let key of Object.keys(object2)) {
let value = object2[key];
let originalValue = object1[key];
if (value === Delete) {
deletedProperties.push(key);
hasChanges = true;
} else {
let transformedValue;
let normalizedValue = (typeof value === "function")
? value(originalValue)
: value;
if (typeof normalizedValue === "object" && normalizedValue !== null) {
// NOTE: We default to an empty object for the original value because from the perspective of a deep-merge, any nested paths required by the new input that don't exist in the original input should be imagined into existence.
transformedValue = immutableDeepMerge(originalValue ?? {}, normalizedValue);
} else {
transformedValue = normalizedValue;
}
changedProperties[key] = transformedValue;
if (transformedValue !== originalValue) {
hasChanges = true;
}
}
}
}
if (hasChanges) {
let merged = { ... object1, ... changedProperties };
for (let property of deletedProperties) {
delete merged[property];
}
return merged;
} else {
return object1;
}
};
module.exports.Delete = Delete;
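
A usage sketch; values in the second object may be plain data, transform functions, or the `Delete` sentinel:

const immutableDeepMerge = require("./src/packages/immutable-deep-merge");

let original = { name: "joepie91", settings: { theme: "dark", beta: true } };

let merged = immutableDeepMerge(original, {
    settings: { theme: "light", beta: immutableDeepMerge.Delete },
    visits: (count) => (count ?? 0) + 1
});

// merged: { name: "joepie91", settings: { theme: "light" }, visits: 1 }
// `original` is untouched, and unchanged subtrees keep their object identity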

15
src/packages/named-cartesian-product/index.js

@@ -0,0 +1,15 @@
"use strict";
const cartesianProduct = require("cartesian-product");
module.exports = function namedCartesianProduct(object) {
let keys = Object.keys(object);
let products = cartesianProduct(keys.map((key) => object[key]));
return products.map((values) => {
return Object.fromEntries(keys.map((key, i) => {
return [ key, values[i] ];
}));
});
};
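
For example:

const namedCartesianProduct = require("./src/packages/named-cartesian-product");

namedCartesianProduct({ fruit: [ "apple", "pear" ], amount: [ 1, 2 ] });
// => four objects, one per combination, e.g. { fruit: "apple", amount: 1 },
//    { fruit: "apple", amount: 2 }, ... (ordering depends on cartesian-product)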

150
src/packages/orderable-varint/index.js

@@ -0,0 +1,150 @@
"use strict";
const log2 = require("@extra-bigint/log2"); // TODO: Do we not need this anymore?
const assert = require("assert");
const countBits = require("../../storage-encoder/bitwise/count-bits");
const generateMask = require("../../storage-encoder/bitwise/generate-mask");
const invertBits = require("../../storage-encoder/bitwise/invert");
const invertBits1Byte = require("../../storage-encoder/bitwise/invert-1byte");
const truncateLeftBits = require("../../storage-encoder/bitwise/truncate-left-bits");
const bigintBuffer = require("../../storage-encoder/bigint/buffer");
const absBigInt = require("../../storage-encoder/bigint/abs");
function isNegativeHeaderByte(value) {
return (value & 128) === 0;
}
function calculateEncodedSize(value) {
let valueBitsNeeded = countBits(value);
let valueBytesNeeded = Math.ceil(valueBitsNeeded / 8);
let sizeBitsNeeded = valueBytesNeeded + 1;
// We loop here because the addition of a header can actually bump up the needed amount of bits in some cases. It should never be needed more than 3 times, though.
// FIXME: Add a limit and an error when it's exceeded
while (true) {
let totalBitsNeeded = valueBitsNeeded + sizeBitsNeeded;
let totalBytesNeeded = Math.ceil(totalBitsNeeded / 8);
if (sizeBitsNeeded === totalBytesNeeded + 1) {
return {
totalBytes: BigInt(totalBytesNeeded),
valueBits: BigInt(valueBitsNeeded)
};
} else {
sizeBitsNeeded = totalBytesNeeded + 1;
}
}
}
function readByteCount(value) {
if (value < 128) {
// 0xxxxxxx, this should never happen in the first byte!
return 0;
} else if (value & 128 && value < 192) {
// 10xxxxxx
return 1;
} else if (value & 192 && value < 224) {
// 110xxxxx
return 2;
} else if (value & 224 && value < 240) {
// 1110xxxx
return 3;
} else if (value & 240 && value < 248) {
// 11110xxx
return 4;
} else if (value & 248 && value < 252) {
// 111110xx
return 5;
} else if (value & 252 && value < 254) {
// 1111110x
return 6;
} else if (value === 254) {
// 11111110
return 7;
} else {
// 11111111
return 8;
}
}
function readBytes(bytes) {
assert(bytes.length > 0);
let negative = isNegativeHeaderByte(bytes[0]);
let headerRead = false;
let i = 0;
let totalByteCount = 0;
let value = 0n;
while (!headerRead || (i < totalByteCount)) {
let byte = bytes[i];
// If the first byte has a negative sign bit, invert the bits so that we can use the same byte count parsing logic for both negative and positive values
let normalizedByte = (negative)
? invertBits1Byte(byte)
: byte;
let byteValue;
if (!headerRead) {
let byteCount = readByteCount(normalizedByte);
totalByteCount += byteCount;
if (byteCount === 8) {
// A full header byte; the length header continues into the next byte, so advance before continuing to avoid re-reading this byte forever
i++;
continue;
} else {
if (totalByteCount === 0) {
throw new Error(`Found a 0-byte value, this should never happen`);
}
headerRead = true;
byteValue = truncateLeftBits(normalizedByte, byteCount + 1); // truncate the byteCount bits and the terminator bit
}
} else {
value <<= 8n;
byteValue = normalizedByte;
}
if (negative) {
value -= BigInt(byteValue);
} else {
value += BigInt(byteValue);
}
i++;
}
if (!headerRead) {
throw new Error(`Reached end of value while reading header`);
}
return {
value: value,
bytesRead: totalByteCount
};
}
module.exports = {
encode: function encodeOrderableVarint(value) {
let valueN = BigInt(value);
let absoluteValue = absBigInt(valueN);
// NOTE: totalBytes represents both the total size in bytes of the encoded value, *and* the amount of header bits (minus the terminator)
let { totalBytes, valueBits } = calculateEncodedSize(absoluteValue);
let headerBits = generateMask(totalBytes);
// Since the terminator bit is already accounted for in the size calculation, we don't need to do anything special for it here - it'll be void space between the header and the value by definition
let header = headerBits << (totalBytes * 7n);
let encodedValue = header + absoluteValue;
if (valueN < 0) {
encodedValue = invertBits(encodedValue);
}
return bigintBuffer.toBuffer(encodedValue);
},
decode: function decodeOrderableVarint(bytes) {
return readBytes(bytes);
}
};
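
A round-trip sketch; the intent of the encoding is that the byte-wise sort order of the buffers matches the numeric order of the encoded values:

const orderableVarint = require("./src/packages/orderable-varint");

let a = orderableVarint.encode(-300);
let b = orderableVarint.encode(5);
let c = orderableVarint.encode(70000);

// Intended property: Buffer.compare(a, b) < 0 and Buffer.compare(b, c) < 0
orderableVarint.decode(c); // { value: 70000n, bytesRead: 3 }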

52
src/packages/tape-snapshot/index.js

@@ -0,0 +1,52 @@
"use strict";
const fs = require("fs");
const path = require("path");
const cbor = require("cbor");
function sanitizeTestName(name) {
return name.replace(/[\/\\\s]+/g, "-");
}
function serializeValue(value) {
return cbor.encodeOne(value, { highWaterMark: 1e8 });
}
function deserializeValue(value) {
return cbor.decodeFirstSync(value);
}
module.exports = {
setup: function (tape, snapshotsRoot) {
Object.assign(tape.Test.prototype, {
equalsSnapshot: function (value, id) {
let testName = sanitizeTestName(this.name);
let snapshotNumber = (this.__lastSnapshot ?? 0) + 1;
this.__lastSnapshot = snapshotNumber;
let fullTestName = (id != null)
? `${testName}-${id}`
: `${testName}-snapshot-${snapshotNumber}`;
let snapshotPath = path.resolve(snapshotsRoot, `${fullTestName}.cbor`);
let serializedValue = serializeValue(value);
if (process.env.UPDATE_SNAPSHOT === "ALL" || process.env.UPDATE_SNAPSHOT === fullTestName) {
fs.mkdirSync(snapshotsRoot, { recursive: true });
fs.writeFileSync(snapshotPath, serializedValue);
console.warn(`[!] Snapshot for '${fullTestName}' was updated`);
} else if (fs.existsSync(snapshotPath)) {
// NOTE: To ensure that the replacer transforms are applied to *both* values, we *always* serialize the current value even when we're just comparing it against a known one; we then just deserialize it again below.
// TODO: Investigate whether this can be optimized with a recursive object transform instead
let knownValue = deserializeValue(fs.readFileSync(snapshotPath));
let deserializedValue = deserializeValue(serializedValue);
this.deepEquals(deserializedValue, knownValue, `Snapshot for '${fullTestName}' does not match; re-run with UPDATE_SNAPSHOT=${fullTestName} to update the snapshot and mark the current result as valid`);
} else {
throw new Error(`No known snapshot for '${fullTestName}'; re-run with UPDATE_SNAPSHOT=${fullTestName} to create it automatically`);
}
}
});
}
};
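
A usage sketch, assuming the tape test runner; `encodeSomething` is a stand-in for whatever produces the value under test:

const tape = require("tape");
const tapeSnapshot = require("./src/packages/tape-snapshot");

tapeSnapshot.setup(tape, "./tests/_snapshots");

tape("boolean encoding", (test) => {
    test.equalsSnapshot(encodeSomething()); // compared against tests/_snapshots/boolean-encoding-snapshot-1.cbor
    test.end();
});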

161
src/query/methods/v1.js

@@ -0,0 +1,161 @@
"use strict";
const assureArray = require("assure-array");
const assert = require("assert");
// moreThan, lessThan, equals, not, where, select, insert, update (set), delete, collapse, collapseBy, count, parameter
module.exports = {
insertInto: function (collection, clauses) {
return {
type: "insert",
collection: collection,
clauses: assureArray(clauses)
};
},
update: function (collection, clauses) {
return {
type: "insert",
collection: collection,
clauses: assureArray(clauses)
};
},
selectFrom: function (collection, clauses) {
return {
type: "select",
collection: collection,
clauses: assureArray(clauses)
};
},
deleteFrom: function (collection, clauses) {
return {
type: "delete",
collection: collection,
clauses: assureArray(clauses)
};
},
moreThan: function (value) {
return {
type: "moreThan",
value: value
};
},
lessThan: function (value) {
return {
type: "lessThan",
value: value
};
},
equals: function (value) {
return {
type: "equals",
value: value
};
},
not: function (value) {
return {
type: "not",
value: value
};
},
where: function (conditions) {
return {
type: "where",
conditions: conditions
};
},
collapse: function (reducers) {
return {
type: "collapse",
fields: null,
reducers: reducers
};
},
collapseBy: function (fields, reducers) {
return {
type: "collapse",
fields: assureArray(fields),
reducers: reducers
};
},
item: function (item) {
return {
type: "items",
items: [ item ]
};
},
items: function (items) {
return {
type: "items",
items: items
};
},
set: function (properties) {
return {