@@ -11,12 +11,17 @@ const isString = require("@validatem/is-string");
const isBoolean = require("@validatem/is-boolean");
const isFunction = require("@validatem/is-function");
const isNumber = require("@validatem/is-number");
const isDate = require("@validatem/is-date");
const arrayOf = require("@validatem/array-of");
const defaultTo = require("@validatem/default-to");
const anyProperty = require("@validatem/any-property");
const anything = require("@validatem/anything");
const ValidationError = require("@validatem/error");
const pipe = require("@promistream/pipe");
const combineSequentialStreaming = require("@promistream/combine-sequential-streaming");
const fromIterable = require("@promistream/from-iterable");
const fromNodeStream = require("@promistream/from-node-stream");
const { addSeconds } = require("date-fns");
const syncpipe = require("syncpipe");
@@ -32,20 +37,21 @@ function noop() {}
function taskResultsToObject(taskResults) {
return syncpipe(taskResults, [
(_) => [_.taskName, _.metadata],
(_) => _.map((result) => [result.taskName, result.metadata]),
(_) => Object.fromEntries(_)
]);
}
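// Illustrative example (hypothetical task name): [{ taskName: "scrape_metadata", metadata: { pages: 3 } }]
// comes out as { scrape_metadata: { pages: 3 } } after the corrected map + Object.fromEntries steps.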
module.exports = function ({ db }) {
return {
getItem: function (tx, id) {
// FIXME: Make object API instead
getItem: function (tx, id, optional = false) {
return Promise.try(() => {
return db.Alias.relatedQuery("item", tx)
.for(id)
.withGraphFetched("taskResults");
}).then((results) => {
if (results.length > 0) {
if (optional === true || results.length > 0) {
return results[0];
} else {
throw new Error(`No item exists with ID '${id}'`);
@@ -148,7 +154,7 @@ module.exports = function ({ db }) {
});
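// Presumably the added updatedAt bump is what lets repointed aliases surface in the getUpdates feed further down.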
return db.Alias.query(tx)
.patch({ itemId: to })
.patch({ itemId: to, updatedAt: new Date() })
.where({ itemId: from });
},
mergeItem: function (_tx, _options) {
@@ -167,77 +173,87 @@ module.exports = function ({ db }) {
});
return Promise.all([
this.getItem(tx, { id: from }),
this.getItem(tx, { id: into }),
]).then(([from, into]) => {
let newData = merge(into.data, from.data);
let fromTaskResults = taskResultsToObject(from.taskResults);
let intoTaskResults = taskResultsToObject(into.taskResults);
// FIXME: Deduplicate function
let allTaskKeys = Array.from(new Set([
...Object.keys(fromTaskResults),
...Object.keys(intoTaskResults)
]));
function selectNewestResult(taskA, taskB) {
if (taskA == null) {
return taskB;
} else if (taskB == null) {
return taskA;
} else if (taskA.updatedAt > taskB.updatedAt) {
return taskA;
} else {
return taskB;
}
}
this.getItem(tx, from, true),
this.getItem(tx, into, true),
]).then(([fromObj, intoObj]) => {
if (fromObj != null) {
let defaultedIntoObj = defaultValue(intoObj, {
id: into,
data: {},
taskResults: []
});
let newData = merge(defaultedIntoObj.data, fromObj.data);
let fromTaskResults = taskResultsToObject(fromObj.taskResults);
let intoTaskResults = taskResultsToObject(defaultedIntoObj.taskResults);
// TODO: Use merge-by-template here instead?
let newTaskResults = allTaskKeys.map((key) => {
let merger = mergeMetadata[key];
let fromTask = fromTaskResults[key];
let intoTask = intoTaskResults[key];
if (merger != null) {
// Generate a new TaskResult that includes data combined from both
let newMetadata = merger(
defaultValue(intoTask.metadata, {}),
defaultValue(fromTask.metadata, {})
);
return {
...intoTask,
metadata: newMetadata,
updatedAt: Date.now()
};
} else {
// Take the newest known TaskResult and just make sure that it is pointing at the correct ID
return {
...selectNewestResult(intoTask, fromTask),
itemId: into.id
};
// FIXME: Deduplicate function
let allTaskKeys = Array.from(new Set([
...Object.keys(fromTaskResults),
...Object.keys(intoTaskResults)
]));
function selectNewestResult(taskA, taskB) {
if (taskA == null) {
return taskB;
} else if (taskB == null) {
return taskA;
} else if (taskA.updatedAt > taskB.updatedAt) {
return taskA;
} else {
return taskB;
}
}
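// NOTE: When both results exist and their timestamps are equal, selectNewestResult returns taskB, i.e. the `from` side as it is called below.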
});
let upsertOptions = {
insertMissing: true,
noDelete: true
};
// TODO: Use merge-by-template here instead?
let newTaskResults = allTaskKeys.map((key) => {
let merger = mergeMetadata[key];
let fromTask = fromTaskResults[key];
let intoTask = intoTaskResults[key];
if (merger != null) {
// Generate a new TaskResult that includes data combined from both
let newMetadata = merger(
defaultValue(intoTask.metadata, {}),
defaultValue(fromTask.metadata, {})
);
return {
...intoTask,
metadata: newMetadata,
updatedAt: Date.now()
};
} else {
// Take the newest known TaskResult and just make sure that it is pointing at the correct ID
return {
...selectNewestResult(intoTask, fromTask),
itemId: defaultedIntoObj.id
};
}
});
return Promise.try(() => {
return into.$query(tx).upsertGraph({
data: newData,
taskResults: newTaskResults
}, upsertOptions);
}).then(() => {
// NOTE: Repointing aliases has the side-effect of leaving a redirect from the source to the destination item, as each item has a self:self alias
return this.repointAliases(tx, { from: from.id, to: into.id });
}).then(() => {
// NOTE: We don't use this.deleteItem, to sidestep any alias lookups
return db.Item.query(tx).findById(from.id).delete();
});
let upsertOptions = {
insertMissing: true,
noDelete: true
};
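// Objection upsertGraph options: insertMissing inserts graph nodes that don't exist yet (the defaulted item here), and noDelete keeps rows that are absent from the supplied graph.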
return Promise.try(() => {
// NOTE: Cannot use into.$query here because that adds an implicit query builder operation, which upsertGraph does not allow
return db.Item.query(tx).upsertGraph({
id: defaultedIntoObj.id,
data: newData,
taskResults: newTaskResults
}, upsertOptions);
}).then(() => {
// NOTE: Repointing aliases has the side-effect of leaving a redirect from the source to the destination item, as each item has a self:self alias
return this.repointAliases(tx, { from: fromObj.id, to: defaultedIntoObj.id });
}).then(() => {
// NOTE: We don't use this.deleteItem, to sidestep any alias lookups
return db.Item.query(tx).findById(fromObj.id).delete();
});
}
});
},
deleteItem: function (_tx, _options) {
@@ -265,7 +281,11 @@ module.exports = function ({ db }) {
}]
});
let promise = db.Alias.query(tx).insert({ alias: from, itemId: to });
let promise = db.Alias.query(tx).insert({
alias: from,
itemId: to,
updatedAt: new Date()
});
if (failIfExists) {
return promise;
@@ -281,6 +301,7 @@ module.exports = function ({ db }) {
}]
});
// TODO: This cannot yet be propagated to the update feed, because we don't keep a record of deletions
return db.Alias.query(tx).findById(from).delete();
},
updateData: function (_tx, _options) {
@@ -379,6 +400,48 @@ module.exports = function ({ db }) {
}).then((result) => {
return result[0].count;
});
},
getUpdates: function (_tx, _options) {
// NOTE: This returns snake_cased keys! As we're bypassing the Objection internals, no casemapping occurs.
let [tx, { timestamp, prefix }] = validateArguments(arguments, {
tx: [required, isTX],
options: [defaultTo({}), {
timestamp: [isDate],
prefix: [isString]
}]
});
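// Narrows a table query down to rows changed after `timestamp` and/or whose ID column starts with `prefix`; note that only % gets escaped for the LIKE, so a literal _ in the prefix would still act as a single-character wildcard.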
function applyWhereClauses(query, idField) {
if (timestamp != null) {
query = query.whereRaw(`updated_at > ?`, [timestamp]);
}
if (prefix != null) {
query = query.whereRaw(`${idField} LIKE ?`, [`${prefix.replace(/%/g, "\\%")}%`]);
}
return query;
}
// FIXME/MARKER: Below query streams are all producing 0 items, why? Running them manually yields results.
function* streamGenerator() {
yield fromNodeStream.fromReadable(
applyWhereClauses(db.Item.query(tx), "id").toKnexQuery().stream()
);
yield fromNodeStream.fromReadable(
applyWhereClauses(db.Alias.query(tx), "item_id").toKnexQuery().stream()
);
yield fromNodeStream.fromReadable(
applyWhereClauses(db.TaskResult.query(tx), "item_id").toKnexQuery().stream()
);
}
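// Presumably the generator is consumed lazily by fromIterable; combineSequentialStreaming then drains each yielded stream in order, so updates come out grouped per table (items, then aliases, then task results).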
return pipe([
fromIterable(streamGenerator()),
combineSequentialStreaming()
]);
}
};
};