Mike Gerwitz

Activist for User Freedom

aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMike Gerwitz <gerwitzm@lovullo.com>2017-02-21 14:19:31 -0500
committerMike Gerwitz <gerwitzm@lovullo.com>2017-02-21 14:19:31 -0500
commit68e929d2b8d562fd2bfe7dafc2187215c7ef2d72 (patch)
tree5bf3599ce6246477d51b3dfb6ddb83e18909f0c8
parent9225924e7e3d2b872f63986fcfc2d6e438de94c7 (diff)
parent62996102a87f17a5f3d9da54531e371e24f37510 (diff)
downloadliza-68e929d2b8d562fd2bfe7dafc2187215c7ef2d72.tar.gz
liza-68e929d2b8d562fd2bfe7dafc2187215c7ef2d72.tar.bz2
liza-68e929d2b8d562fd2bfe7dafc2187215c7ef2d72.zip
Validation monitoring fixes
This does two things: - Ensures previous validation requests are complete before processing another, preventing internal state from being screwed up; and - Prevents empty diffs from triggering staging bucket events, which is both a performance benefit and stops ValidStateMonitor from getting confused and immediately marking failures as fixed in certain circumstances.
-rw-r--r--src/bucket/DelayedStagingBucket.js186
-rw-r--r--src/bucket/QuoteDataBucket.js324
-rw-r--r--src/bucket/StagingBucket.js566
-rw-r--r--src/bucket/StagingBucketAutoDiscard.js103
-rw-r--r--src/client/ClientDependencyFactory.js8
-rw-r--r--src/validate/DataValidator.js87
-rw-r--r--test/bucket/StagingBucketTest.js185
-rw-r--r--test/validate/DataValidatorTest.js227
8 files changed, 1609 insertions, 77 deletions
diff --git a/src/bucket/DelayedStagingBucket.js b/src/bucket/DelayedStagingBucket.js
new file mode 100644
index 0000000..1d2ca92
--- /dev/null
+++ b/src/bucket/DelayedStagingBucket.js
@@ -0,0 +1,186 @@
+/**
+ * Delayed writing to staging bucket
+ *
+ * Copyright (C) 2017 LoVullo Associates, Inc.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+var Class = require( 'easejs' ).Class,
+ StagingBucket = require( './StagingBucket' );
+
+
+/**
+ * Holds changes until explicitly processed to avoid cascades
+ *
+ * Since each write could trigger any number of event listeners, writes
+ * should be queued and done en-masse.
+ */
+module.exports = Class( 'DelayedStagingBucket' )
+ .extend( StagingBucket,
+{
+ /**
+ * Queued data
+ * @type {Object}
+ */
+ 'private _queued': {},
+
+ /**
+ * Delay timer id
+ * @type {number}
+ */
+ 'private _timer': 0,
+
+
+ 'public override setValues': function( data, merge_index, merge_null )
+ {
+ for ( var name in data )
+ {
+ if ( merge_index )
+ {
+ if ( this._queued[ name ] === undefined )
+ {
+ this._queued[ name ] = [];
+ }
+
+ // merge individual indexes
+ this.merge( data[ name ], this._queued[ name ] );
+ }
+ else
+ {
+ // no index merge; replace any existing data
+ this._queued[ name ] = Array.prototype.slice.call(
+ data[ name ], 0
+ );
+
+ // this will ensure that no data will follow what we were
+ // provided
+ this._queued[ name ].push( null );
+ }
+ }
+
+ this._setTimer();
+ return this;
+ },
+
+
+ 'private _setTimer': function()
+ {
+ // no need to re-set timers
+ if ( this._timer )
+ {
+ return;
+ }
+
+ // invoke when stack clears
+ var _self = this;
+ this._timer = setTimeout( function()
+ {
+ _self.processValues();
+ }, 0 );
+ },
+
+
+ /**
+ * Retrieve the data that will result after a merge
+ *
+ * This should be used sparingly, since if this is called before data is
+ * actually merged into the bucket, then it is possible that the values will
+ * change after validations are run.
+ */
+ 'public getPendingDataByName': function( name, diff )
+ {
+ diff = diff || this._queued;
+
+ var pending = this.getDataByName['super'].call( this, name );
+ if ( !( this._queued[ name ] || diff[ name ] ) )
+ {
+ return pending;
+ }
+
+ // merge the queued data
+ this.merge( ( this._queued[ name ] || diff[ name ] ), pending, true );
+ return pending;
+ },
+
+
+ 'public override getDataByName': function( name )
+ {
+ // if enqueued data is requested, then we have no choice but to merge to
+ // ensure that the data is up-to-date
+ if ( this._queued[ name ] )
+ {
+ this.processValues();
+ }
+
+ return this.__super.call( this, name );
+ },
+
+
+ 'public override getData': function()
+ {
+ // gah!
+ var _s = this.__super;
+ this.processValues();
+ return _s.call( this );
+ },
+
+
+ 'public override each': function( c )
+ {
+ var _s = this.__super;
+ this.processValues();
+ return _s.call( this, c );
+ },
+
+
+ 'public override getFilledDiff': function()
+ {
+ var _s = this.__super;
+ this.processValues();
+ return _s.call( this );
+ },
+
+
+ 'public override hasIndex': function( name, i )
+ {
+ var _s = this.__super;
+ this.processValues();
+ return _s.call( this, name, i );
+ },
+
+
+ 'public processValues': function()
+ {
+ // if no timer is set, then we have no data
+ if ( !this._timer )
+ {
+ return this;
+ }
+
+ // since additional data may be queued as a consequence of the below
+ // set, prepare for it by providing an empty queue
+ var oldqueue = this._queued;
+ this._queued = {};
+ this._timer = 0;
+
+ this.setValues['super'].call( this,
+ oldqueue, true, true
+ );
+
+ return this;
+ }
+} );
diff --git a/src/bucket/QuoteDataBucket.js b/src/bucket/QuoteDataBucket.js
new file mode 100644
index 0000000..b5c3719
--- /dev/null
+++ b/src/bucket/QuoteDataBucket.js
@@ -0,0 +1,324 @@
+/**
+ * Key/value store
+ *
+ * Copyright (C) 2017 LoVullo Associates, Inc.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+var Class = require( 'easejs' ).Class,
+ Bucket = require( './Bucket' ),
+ EventEmitter = require( 'events' ).EventEmitter;
+
+/**
+ * General key/value store for document
+ *
+ * The term "Quote" here is an artifact from the initial design of the
+ * system used for insurance quoting. It will be renamed.
+ *
+ * @todo Rename to DocumentDataBucket
+ */
+module.exports = Class( 'QuoteDataBucket' )
+ .implement( Bucket )
+ .extend( EventEmitter,
+{
+ /**
+ * Triggered when data in the bucket is updated, before it's committed
+ * @type {string}
+ */
+ 'const EVENT_UPDATE': 'update',
+
+
+ /**
+ * Raw key/value store
+ * @type {Object}
+ */
+ 'private _data': {},
+
+
+ /**
+ * Cleans a name for use in the bucket
+ *
+ * Removes trailing brackets, if present
+ *
+ * @return {string} cleaned name
+ */
+ 'private _cleanName': function( name )
+ {
+ name = ''+name || '';
+
+ var bracket = name.indexOf( '[' );
+ if ( bracket == -1 )
+ {
+ return name;
+ }
+
+ return name.substring( 0, bracket );
+ },
+
+
+ /**
+ * Explicitly sets the contents of the bucket
+ *
+ * @param {Object.<string,Array>} data associative array of the data
+ *
+ * @param {boolean} merge_index whether to merge indexes individually
+ * @param {boolean} merge_null whether to merge undefined values (vs
+ * ignore)
+ *
+ * @return {QuoteDataBucket} self to allow for method chaining
+ */
+ 'public setValues': function( data, merge_index, merge_null )
+ {
+ this._mergeData( data, merge_index, merge_null );
+ return this;
+ },
+
+
+ /**
+ * Alias of setValues
+ *
+ * @return {QuoteDataBucket} self to allow for method chaining
+ */
+ 'public setCommittedValues': function()
+ {
+ return this.setValues.apply( this, arguments );
+ },
+
+
+ /**
+ * Clears all data from the bucket
+ *
+ * @return {QuoteDataBucket} self
+ */
+ 'public clear': function()
+ {
+ this._data = {};
+ return this;
+ },
+
+
+ /**
+ * Merges updated data with the existing data
+ *
+ * @param Object data updated data
+ *
+ * @return undefined
+ */
+ 'private _mergeData': function( data, merge_index, merge_null )
+ {
+ merge_index = !!merge_index; // default false
+ merge_null = !!merge_null; // default false
+
+ var ignore = {};
+
+ // remove any data that has not been updated (the hooks do processing on
+ // this data, often updating the DOM, so it's faster to do this than to
+ // have a lot of unnecessary DOM work done)
+ for ( name in data )
+ {
+ var data_set = data[ name ],
+ pre_set = this._data[ name ],
+ changed = false;
+
+ // if there's no previous data for this key, or the lengths vary,
+ // then we want to keep it
+ if ( ( pre_set === undefined )
+ || ( pre_set.length !== data_set.length )
+ )
+ {
+ continue;
+ }
+
+ for ( var i = 0, len = data_set.length; i < len; i++ )
+ {
+ if ( data_set[ i ] !== pre_set[ i ] )
+ {
+ changed = true;
+ break;
+ }
+ }
+
+ // data matches original---we do not want to delete it, since that
+ // would modify the provided object; instead, mark it to be ignored
+ if ( changed === false )
+ {
+ ignore[ name ] = true;
+ }
+ }
+
+ this.emit( this.__self.$('EVENT_UPDATE'), data );
+
+ for ( name in data )
+ {
+ if ( ignore[ name ] )
+ {
+ continue;
+ }
+
+ var data_set = data[ name ];
+
+ // if we're not supposed to merge the indexes one by one, just set
+ // it
+ if ( merge_index === false )
+ {
+ this._data[name] = data_set;
+ continue;
+ }
+
+ // initialize it if it's undefined in the bucket
+ if ( this._data[name] === undefined )
+ {
+ this._data[name] = [];
+ }
+
+ // merge the indexes one by one to preserve existing data
+ var data_set_len = data_set.length;
+ for ( var i = 0; i < data_set_len; i++ )
+ {
+ // ignore undefined (since we're merging, if it's not set, then
+ // we don't want to remove the data that's already there)
+ if ( data_set[ i ] === undefined )
+ {
+ continue;
+ }
+
+ // ignore if set to null (implying the index was removed)
+ if ( !merge_null && data_set[i] === null )
+ {
+ // this marks the end of the array as far as we're concerned
+ this._data[ name ].length = i;
+ break;
+ }
+
+ this._data[name][i] = data_set[i];
+ }
+ }
+ },
+
+
+ /**
+ * Overwrites values in the original bucket
+ *
+ * For this bucket, overwriteValues() is an alias for setValues() without
+ * index merging. However, other Bucket implementations may handle it
+ * differently.
+ *
+ * @param {Object.<string,Array>} data associative array of the data
+ *
+ * @return {Bucket} self
+ */
+ 'public overwriteValues': function( data )
+ {
+ this.setValues( data, false );
+ return this;
+ },
+
+
+ /**
+ * Calls a function for each of the values in the bucket
+ *
+ * Note: This format is intended to be consistent with Array.forEach()
+ *
+ * @param {function(string,string)} callback function to call for each
+ * value in the bucket
+ *
+ * @return {QuoteDataBucket} self to allow for method chaining
+ */
+ 'public each': function( callback )
+ {
+ var bucket = this;
+
+ for ( var name in this._data )
+ {
+ callback( this._data[ name ], name );
+ }
+
+ return this;
+ },
+
+
+ /**
+ * Calls a function for each of the values in the bucket matching the
+ * given predicate
+ *
+ * @param {function(string)} pred predicate
+ * @param {function( Object, number )} c function to call for each
+ * value in the bucket
+ *
+ * @return {StagingBucket} self
+ */
+ 'public filter': function( pred, c )
+ {
+ this.each( function( data, name )
+ {
+ if ( pred( name ) )
+ {
+ c( data, name );
+ }
+ } );
+ },
+
+
+ /**
+ * Returns the data for the requested field
+ *
+ * @param {string} name name of the field (with or without trailing brackets)
+ *
+ * @return {Array} data for the field, or empty array if none
+ */
+ 'public getDataByName': function( name )
+ {
+ var data = this._data[ this._cleanName( name ) ];
+
+ if ( data === undefined )
+ {
+ return [];
+ }
+
+ if ( data === null )
+ {
+ return null;
+ }
+
+ // return a copy of the data
+ return ( typeof data === 'object' ) ? data.slice( 0 ) : data;
+ },
+
+
+ /**
+ * Returns the data as a JSON string
+ *
+ * @return {string} data represented as JSON
+ */
+ 'public getDataJson': function()
+ {
+ return JSON.stringify( this._data );
+ },
+
+
+ /**
+ * Return raw bucket data
+ *
+ * TODO: remove; breaks encapsulation
+ *
+ * @return {Object} raw bucket data
+ */
+ 'public getData': function()
+ {
+ return this._data;
+ }
+} );
diff --git a/src/bucket/StagingBucket.js b/src/bucket/StagingBucket.js
new file mode 100644
index 0000000..907c9df
--- /dev/null
+++ b/src/bucket/StagingBucket.js
@@ -0,0 +1,566 @@
+/**
+ * Staging key/value store
+ *
+ * Copyright (C) 2017 LoVullo Associates, Inc.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+
+const { Class } = require( 'easejs' );
+const Bucket = require( './Bucket' );
+const EventEmitter = require( 'events' ).EventEmitter;
+
+
+/**
+ * Stages and merges values into underlying key/value store
+ */
+module.exports = Class( 'StagingBucket' )
+ .implement( Bucket )
+ .extend( EventEmitter,
+{
+ /**
+ * Triggered when data in the bucket is updated, before it's committed
+ * @type {string}
+ */
+ 'const EVENT_UPDATE': 'update',
+
+ 'const EVENT_STAGING_PRE_UPDATE': 'preStagingUpdate',
+ 'const EVENT_STAGING_UPDATE': 'stagingUpdate',
+
+ 'const EVENT_PRE_COMMIT': 'preCommit',
+ 'const EVENT_COMMIT': 'commit',
+
+ 'const EVENT_STAGING_PRE_REVERT': 'preRevert',
+ 'const EVENT_STAGING_REVERT': 'revert',
+ 'const EVENT_STAGING_POST_REVERT': 'postRevert',
+
+
+ /**
+ * Bucket to wrap
+ * @type {Bucket}
+ */
+ 'private _bucket': null,
+
+
+ /**
+ * Contains staged (uncommitted) data
+ * @type {Object.<string,Array>}
+ */
+ 'private _staged': {},
+
+ /**
+ * Represents the current state of the bucket for fast retrieval
+ * @type {Object.<string,Array>}
+ */
+ 'private _curdata': {},
+
+ /**
+ * Whether data is staged but not committed
+ *
+ * Ah, bringing back the "dirty" term from the good ol' days of the "dirty
+ * bucket"!
+ *
+ * @type {boolean}
+ */
+ 'private _dirty': false,
+
+
+ /**
+ * Initializes staging bucket with the provided data bucket
+ *
+ * @param {Bucket} bucket bucket in which to store data
+ *
+ * @return {undefined}
+ */
+ 'public __construct': function( bucket )
+ {
+ this._bucket = bucket;
+
+ const _self = this;
+ const _event = this.__self.$('EVENT_UPDATE');
+
+ // forward events
+ bucket.on( _event, function( data )
+ {
+ _self.emit( _event, data );
+ } );
+
+ this._initState();
+ },
+
+
+ 'private _initState': function()
+ {
+ const data = this._bucket.getData();
+ const retdata = function() {};
+
+ // ensure that we don't modify the original data
+ retdata.prototype = data;
+
+ this._curdata = new retdata();
+ this._dirty = false;
+ },
+
+
+ 'protected merge': function( src, dest, nonull )
+ {
+ nonull = !!nonull;
+
+ const len = src.length;
+
+ for ( let i = 0; i < len; i++ )
+ {
+ let data = src[ i ];
+
+ // don't merge if it's undefined or if null and nulls were not
+ // permitted
+ if ( data === undefined )
+ {
+ continue;
+ }
+ else if ( nonull && ( data === null ) )
+ {
+ // nulls mark the end of the set
+ dest.length = i;
+ break;
+ }
+
+ // merge with bucket data
+ dest[ i ] = data;
+ }
+ },
+
+
+ /**
+ * Analogous to setValues(), but immediately commits the changes
+ *
+ * This still calls setValues() to ensure all events are properly kicked
+ * off.
+ */
+ 'public setCommittedValues': function( data /*, ...*/ )
+ {
+ this._bucket.setValues.apply( this._bucket, arguments );
+
+ // no use in triggering a pre-update, since these values are
+ // already committed
+ this.emit( this.__self.$('EVENT_STAGING_UPDATE'), data );
+
+ return this;
+ },
+
+
+ /**
+ * Determine whether values have changed
+ *
+ * If all values are identical to the current bucket values (relative to
+ * `merge_index`), returns `false`. Otherwise, this stops at the first
+ * recognized change and returns `true`.
+ *
+ * @param {Object.<string,Array>} data key/value data or diff
+ * @param {boolean} merge_index compare indexes individually
+ *
+ * @return {boolean} whether a change was recognized
+ */
+ 'private _hasChanged': function( data, merge_index )
+ {
+ for ( let name in data )
+ {
+ let values = data[ name ];
+ let cur = this._curdata[ name ] || [];
+
+ if ( !merge_index && ( values.length !== cur.length ) )
+ {
+ return true;
+ }
+
+ for ( let index in values )
+ {
+ if ( merge_index && ( values[ index ] === undefined ) )
+ {
+ continue;
+ }
+
+ if ( values[ index ] !== cur[ index ] )
+ {
+ return true;
+ }
+ }
+ }
+
+ return false;
+ },
+
+
+ /**
+ * Explicitly sets the contents of the bucket
+ *
+ * @param {Object.<string,Array>} data associative array of the data
+ *
+ * @param {boolean} merge_index whether to merge indexes individually
+ * @param {boolean} merge_null whether to merge undefined values (vs
+ * ignore)
+ *
+ * @return {Bucket} self
+ */
+ 'virtual public setValues': function( data, merge_index, merge_null )
+ {
+ if ( !this._hasChanged( data, merge_index ) )
+ {
+ return;
+ }
+
+ this.emit( this.__self.$('EVENT_STAGING_PRE_UPDATE'), data );
+
+ for ( let name in data )
+ {
+ let item = Array.prototype.slice.call( data[ name ], 0 );
+
+ // initialize as array if necessary
+ if ( this._staged[ name ] === undefined )
+ {
+ this._staged[ name ] = [];
+ }
+
+ // since _curdata's prototype is a reference to the internal data of
+ // the other bucket (gah!---for perf reasons), we need to take care
+ // to ensure that we do not modify it...this accomplishes that
+ if ( Object.hasOwnProperty.call( this._curdata, name ) === false )
+ {
+ if ( this._curdata[ name ] !== undefined )
+ {
+ this._curdata[ name ] = Array.prototype.slice.call(
+ this._curdata[ name ], 0
+ );
+ }
+ else
+ {
+ this._curdata[ name ] = [];
+ }
+ }
+
+ if ( merge_index )
+ {
+ // merge with previous values
+ this.merge( item, this._staged[ name ] );
+
+ // we do not want nulls in our current representation of the
+ // data
+ this.merge( item, this._curdata[ name ], true );
+ }
+ else
+ {
+ // overwrite
+ this._staged[ name ] = item;
+ this._curdata[ name ] = item;
+ }
+ }
+
+ this._dirty = true;
+ this.emit( this.__self.$('EVENT_STAGING_UPDATE'), data );
+
+ return this;
+ },
+
+
+ /**
+ * Overwrites values in the original bucket
+ *
+ * @param {Object.<string,Array>} data associative array of the data
+ *
+ * @return {StagingBucket} self
+ */
+ 'public overwriteValues': function( data )
+ {
+ const new_data = {};
+
+ for ( let name in data )
+ {
+ new_data[ name ] = Array.prototype.slice.call( data[ name ], 0 );
+
+ // a terminating null ensures all data is overwritten, rather than
+ // just the beginning indexes
+ new_data[ name ].push( null );
+ }
+
+ return this.setValues( new_data, false );
+ },
+
+
+ /**
+ * Returns staged data
+ *
+ * @return {Object.<string,Array>}
+ */
+ 'public getDiff': function()
+ {
+ return this._staged;
+ },
+
+
+ /**
+ * Returns a field-oriented diff filled with all values rather than a
+ * value-oriented diff
+ *
+ * Only the fields that have changed are returned. Each field contains its
+ * actual value---not the diff representation of what portions of the field
+ * have changed.
+ *
+ * @return {Object} filled diff
+ */
+ 'virtual public getFilledDiff': function()
+ {
+ const ret = {};
+
+ // return each staged field
+ for ( let field in this._staged )
+ {
+ // retrieve the current value for this field
+ ret[ field ] = Array.prototype.slice.call(
+ this._curdata[ field ], 0
+ );
+ }
+
+ return ret;
+ },
+
+
+ /**
+ * Reverts staged changes, preventing them from being committed
+ *
+ * This will also generate a diff and raise the same events that would be
+ * raised by setting values in the conventional manner, allowing reverts to
+ * transparently integrate with the remainder of the system.
+ *
+ * @return {StagingBucket} self
+ */
+ 'public revert': function( evented )
+ {
+ evented = ( evented === undefined ) ? true : !!evented;
+
+ const data = {};
+
+ // generate data for this revert (so that hooks may properly handle it)
+ for ( let name in this._staged )
+ {
+ let curstaged = this._staged[ name ];
+ let orig = this._bucket.getDataByName( name );
+
+ data[ name ] = [];
+ for ( let i in curstaged )
+ {
+ // if the original value is undefined, then we want to remove
+ // the value entirely, *not* set it to undefined (which would
+ // affect the length of the array)
+ if ( orig[ i ] === undefined )
+ {
+ delete data[ name ][ i ];
+ continue;
+ }
+
+ data[ name ][ i ] = orig[ i ];
+ }
+ }
+
+ if ( evented )
+ {
+ this.emit( this.__self.$('EVENT_STAGING_PRE_REVERT'), data );
+ this.emit( this.__self.$('EVENT_STAGING_PRE_UPDATE'), data );
+ }
+
+ this._staged = {};
+ this._initState();
+
+ // everything after this point is evented
+ if ( !( evented ) )
+ {
+ return this;
+ }
+
+ // trigger revert after update (since we did preRevert before update;
+ // this also allows logic to disable further updates; DO NOT CHANGE
+ // ORDER WITHOUT LOOKING AT WHAT USES THIS!)
+ this.emit( this.__self.$('EVENT_STAGING_UPDATE'), data );
+ this.emit( this.__self.$('EVENT_STAGING_REVERT'), data );
+
+ // a distinct event lets hooks know that a revert has been completed
+ // (which may be useful for allowing asynchronous data to be
+ // automatically committed following a revert, rather than once again
+ // allowing the staging bucket to be considered dirty)
+ this.emit( this.__self.$('EVENT_STAGING_POST_REVERT'), data );
+
+ return this;
+ },
+
+
+ /**
+ * Commits staged changes, merging them with the bucket
+ *
+ * @return {StagingBucket} self
+ */
+ 'public commit': function( store )
+ {
+ const old = this._staged;
+
+ this.emit( this.__self.$('EVENT_PRE_COMMIT') );
+
+ this._bucket.setValues( this._staged, true, false );
+ this._staged = {};
+
+ this.emit( this.__self.$('EVENT_COMMIT') );
+
+ this._initState();
+
+ if ( typeof store === 'object' )
+ {
+ store.old = old;
+ }
+
+ return this;
+ },
+
+
+ /**
+ * Clears all data from the bucket
+ *
+ * @return {Bucket} self
+ */
+ 'public clear': function()
+ {
+ this._bucket.clear();
+ return this;
+ },
+
+
+ /**
+ * Calls a function for each each of the values in the bucket
+ *
+ * @param {function( Object, number )} callback function to call for each
+ * value in the bucket
+ *
+ * @return {Bucket} self
+ */
+ 'virtual public each': function( callback )
+ {
+ for ( let name in this._curdata )
+ {
+ callback( this._curdata[ name ], name );
+ }
+
+ return this;
+ },
+
+
+ /**
+ * Returns the data for the requested field
+ *
+ * WARNING: This can be a potentially expensive operation if there is a
+ * great deal of staged data. The staged data is merged with the bucket data
+ * on each call. Do not make frequent calls to retrieve the same data. Cache
+ * it instead.
+ *
+ * @param {string} name field name (with or without trailing brackets)
+ *
+ * @return {Array} data for the field, or empty array if none
+ */
+ 'virtual public getDataByName': function( name )
+ {
+ if ( this._curdata[ name ] )
+ {
+ // important: return a clone so that operations on this data don't
+ // modify the bucket without us knowing!
+ return Array.prototype.slice.call( this._curdata[ name ] );
+ }
+
+ return [];
+ },
+
+
+ /**
+ * Returns original bucket data by name, even if there is data staged atop
+ * of it
+ *
+ * There is no additional overhead of this operation versus getDataByName()
+ *
+ * @param {string} name field name (with or without trailing brackets)
+ *
+ * @return {Array} data for the field, or empty array if none
+ */
+ 'public getOriginalDataByName': function( name )
+ {
+ return this._bucket.getDataByName( name );
+ },
+
+
+ /**
+ * Returns the data as a JSON string
+ *
+ * @return {string} data represented as JSON
+ */
+ 'public getDataJson': function()
+ {
+ return this._bucket.getDataJson();
+ },
+
+
+ /**
+ * Return raw bucket data
+ *
+ * TODO: remove; breaks encapsulation
+ *
+ * @return {Object} raw bucket data
+ */
+ 'virtual public getData': function()
+ {
+ return this._curdata;
+ },
+
+
+ /**
+ * Calls a function for each of the values in the bucket matching the
+ * given predicate
+ *
+ * @param {function(string)} pred predicate
+ * @param {function( Object, number )} c function to call for each
+ * value in the bucket
+ *
+ * @return {StagingBucket} self
+ */
+ 'public filter': function( pred, c )
+ {
+ this.each( function( data, name )
+ {
+ if ( pred( name ) )
+ {
+ c( data, name );
+ }
+ } );
+ },
+
+
+ 'virtual public hasIndex': function( name, i )
+ {
+ return ( this._curdata[ name ][ i ] !== undefined );
+ },
+
+
+ 'public isDirty': function()
+ {
+ return this._dirty;
+ }
+} );
diff --git a/src/bucket/StagingBucketAutoDiscard.js b/src/bucket/StagingBucketAutoDiscard.js
new file mode 100644
index 0000000..10d1dbf
--- /dev/null
+++ b/src/bucket/StagingBucketAutoDiscard.js
@@ -0,0 +1,103 @@
+/**
+ * Automatically discard staging bucket contents
+ *
+ * Copyright (C) 2017 LoVullo Associates, Inc.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+var Class = require( 'easejs' ).Class,
+ StagingBucket = require( './StagingBucket' );
+
+
+/**
+ * When enabled, automatically discards staging bucket contents on change.
+ *
+ * This may be useful when the staging bucket should be put into a temporary
+ * state where changes should be disallowed (e.g. in the middle of a
+ * revert).
+ */
+module.exports = Class( 'StagingBucketAutoDiscard',
+{
+ /**
+ * Automatically discards all staged data before it is processed for the
+ * given bucket
+ *
+ * This deletes the data before it is even merged into the staging bucket.
+ *
+ * N.B.: This method silently reverts any staged data currently in the
+ * bucket (without triggering any events), so it is important to handle
+ * existing data properly before calling this (unless the data is garbage).
+ *
+ * @param {StagingBucket} bucket staging bucket to enable on
+ *
+ * @return {StagingBucketAutoDiscard} self
+ */
+ 'public enable': function( bucket )
+ {
+ if ( !( Class.isA( StagingBucket, bucket ) ) )
+ {
+ throw Error( 'Required StagingBucket' );
+ }
+
+ // Unhook to prevent duplicate event hooks.
+ this.disable( bucket );
+
+ // prevent the data write from even being attempted and revert anything
+ // that may be currently in the bucket (without triggering events)
+ bucket
+ .on( 'preStagingUpdate', this._clobberFields )
+ .revert( false );
+
+ return this;
+ },
+
+
+ /**
+ * Disables auto-discard on the given bucket
+ *
+ * @param {StagingBucket} bucket staging bucket to disable on
+ *
+ * @return {StagingBucketAutoDiscard} self
+ */
+ 'public disable': function( bucket )
+ {
+ if ( !( Class.isA( StagingBucket, bucket ) ) )
+ {
+ throw Error( 'Required StagingBucket' );
+ }
+
+ bucket.removeListener( 'preStagingUpdate', this._clobberFields );
+ return this;
+ },
+
+
+ /**
+ * Deletes all fields on the given object
+ *
+ * @param {Object} data object to clobber
+ *
+ * @return {undefined}
+ */
+ 'private _clobberFields': function( data )
+ {
+ for ( var field in data )
+ {
+ // oops!
+ delete data[ field ];
+ }
+ }
+} );
diff --git a/src/client/ClientDependencyFactory.js b/src/client/ClientDependencyFactory.js
index bdf711d..178cdc5 100644
--- a/src/client/ClientDependencyFactory.js
+++ b/src/client/ClientDependencyFactory.js
@@ -55,10 +55,10 @@ var Step = require( '../step/Step' ),
BaseQuote = require( 'program/quote/BaseQuote' ),
QuoteClient = require( 'program/QuoteClient' ),
- QuoteDataBucket = require( 'program/QuoteDataBucket' ),
- StagingBucket = require( 'program/StagingBucket' ),
- StagingBucketAutoDiscard = require( 'program/StagingBucketAutoDiscard' ),
- DelayedStagingBucket = require( 'program/bucket/DelayedStagingBucket' ),
+ QuoteDataBucket = require( '../bucket/QuoteDataBucket' ),
+ StagingBucket = require( '../bucket/StagingBucket' ),
+ StagingBucketAutoDiscard = require( '../bucket/StagingBucketAutoDiscard' ),
+ DelayedStagingBucket = require( '../bucket/DelayedStagingBucket' ),
DataValidator = require( '../validate/DataValidator' ),
ValidStateMonitor = require( '../validate/ValidStateMonitor' ),
diff --git a/src/validate/DataValidator.js b/src/validate/DataValidator.js
index 3690ac5..56ae480 100644
--- a/src/validate/DataValidator.js
+++ b/src/validate/DataValidator.js
@@ -61,6 +61,12 @@ module.exports = Class( 'DataValidator',
*/
'private _stores': {},
+ /**
+ * Pending validation
+ * @type {Promise}
+ */
+ 'private _pending': null,
+
/**
* Initialize validator
@@ -103,6 +109,10 @@ module.exports = Class( 'DataValidator',
/**
* Validate diff and update field monitor
*
+ * If an operation is pending completion, all further requests to this
+ * object will be queued to prevent unexpected/inconsistent system
+ * states and race conditions.
+ *
* The external validator `validatef` is a kluge while the system
* undergoes refactoring.
*
@@ -116,30 +126,40 @@ module.exports = Class( 'DataValidator',
{
const _self = this;
- let failures = {};
-
- if ( diff !== undefined )
+ return this._onceReady( () =>
{
- _self._bucket_validator.validate( diff, ( name, value, i ) =>
+ let failures = {};
+
+ if ( diff !== undefined )
{
- diff[ name ][ i ] = undefined;
+ _self._bucket_validator.validate( diff, ( name, value, i ) =>
+ {
+ diff[ name ][ i ] = undefined;
- ( failures[ name ] = failures[ name ] || {} )[ i ] =
- _self._factory.createFieldFailure( name, i, value );
- }, true );
+ ( failures[ name ] = failures[ name ] || {} )[ i ] =
+ _self._factory.createFieldFailure( name, i, value );
+ }, true );
- validatef && validatef( diff, failures );
- }
+ validatef && validatef( diff, failures );
+ }
- // XXX: this assumes that the above is synchronous
- return this._populateStore( classes, this._stores.cstore, 'indexes' )
- .then( () => this.updateFailures( diff, failures ) );
+ // XXX: this assumes that the above is synchronous
+ return this._pending =
+ this._populateStore(
+ classes, this._stores.cstore, 'indexes'
+ )
+ .then( () => this._doUpdateFailures( diff, failures ) );
+ } );
},
/**
* Update failures from external validation
*
+ * If an operation is pending completion, all further requests to this
+ * object will be queued to prevent unexpected/inconsistent system
+ * states and race conditions.
+ *
* TODO: This is a transitional API---we should handle all validations,
* not allow external systems to meddle in our affairs.
*
@@ -150,6 +170,23 @@ module.exports = Class( 'DataValidator',
*/
'public updateFailures'( diff, failures )
{
+ return this._onceReady( () =>
+ {
+ return this._doUpdateFailures( diff, failures );
+ } );
+ },
+
+
+ /**
+ * Update failures from external validation
+ *
+ * @param {Object} diff bucket diff
+ * @param {Object} failures failures per field name and index
+ *
+ * @return {Promise} promise to populate internal store
+ */
+ 'private _doUpdateFailures': function( diff, failures )
+ {
return this._populateStore( diff, this._stores.bstore ).then( () =>
this._field_monitor.update(
this._stores.store, failures
@@ -174,11 +211,32 @@ module.exports = Class( 'DataValidator',
'public clearFailures'( failures )
{
this._field_monitor.clearFailures( failures );
+
return this;
},
/**
+ * Wait until all requests are complete and then trigger callback
+ *
+ * @param {Function} callback callback to trigger when ready
+ *
+ * @return {Promise}
+ */
+ 'private _onceReady': function( callback )
+ {
+ if ( this._pending )
+ {
+ this._pending.then( callback );
+ return this._pending;
+ }
+
+ return this._pending = callback()
+ .then( () => this._pending = null );
+ },
+
+
+ /**
* Populate store with data
*
* This effectively converts a basic array into a `Store`. This is
@@ -193,8 +251,7 @@ module.exports = Class( 'DataValidator',
{
if ( data === undefined )
{
- // it's important that we don't re-use previous state
- return store.clear().then( [] );
+ return Promise.resolve( [] );
}
const mapf = ( subkey !== undefined )
diff --git a/test/bucket/StagingBucketTest.js b/test/bucket/StagingBucketTest.js
new file mode 100644
index 0000000..b90d359
--- /dev/null
+++ b/test/bucket/StagingBucketTest.js
@@ -0,0 +1,185 @@
+/**
+ * Test of staging key/value store
+ *
+ * Copyright (C) 2017 LoVullo Associates, Inc.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * @todo This needs tests for the rest of StagingBucket
+ */
+
+"use strict";
+
+const { Class } = require( 'easejs' );
+const root = require( '../../' );
+const expect = require( 'chai' ).expect;
+
+const {
+ Bucket,
+ StagingBucket: Sut
+} = root.bucket;
+
+
+describe( 'StagingBucket', () =>
+{
+ describe( 'pre-update event', () =>
+ {
+ it( 'allows updating data before set', () =>
+ {
+ const sut = Sut( createStubBucket() );
+
+ const data = {
+ foo: [ 'bar', 'baz' ],
+ };
+
+ sut.on( 'preStagingUpdate', data =>
+ {
+ data.foo[ 1 ] = 'quux';
+ } );
+
+ // triggers setValues
+ sut.setValues( data );
+
+ expect( sut.getDataByName( 'foo' ) )
+ .to.deep.equal( [ 'bar', 'quux' ] );
+ } );
+
+
+ [
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ 'bar', 'baz' ] },
+ merge_index: true,
+ is_change: false,
+ },
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ 'bar', 'baz' ] },
+ merge_index: false,
+ is_change: false,
+ },
+
+ // actual changes
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ 'change', 'baz' ] },
+ merge_index: true,
+ is_change: true,
+ },
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ 'bar', 'change' ] },
+ merge_index: true,
+ is_change: true,
+ },
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ undefined, 'change' ] },
+ merge_index: true,
+ is_change: true,
+ },
+
+ // single-index changes make sense only if merge_index
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ undefined, 'baz' ] },
+ merge_index: true,
+ is_change: false,
+ },
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ 'bar', undefined ] },
+ merge_index: true,
+ is_change: false,
+ },
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ 'bar', null ] },
+ merge_index: true,
+ is_change: true,
+ },
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [] },
+ merge_index: true,
+ is_change: false,
+ },
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [] },
+ merge_index: false,
+ is_change: true,
+ },
+ {
+ initial: { foo: [ 'bar' ] },
+ update: { foo: [ 'bar', undefined ] },
+ merge_index: false,
+ is_change: true,
+ },
+
+ // only interpreted as a diff if merge_index
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: { foo: [ 'bar', undefined ] },
+ merge_index: false,
+ is_change: true,
+ },
+
+ // no index at all
+ {
+ initial: { foo: [ 'bar', 'baz' ] },
+ update: {},
+ merge_index: true,
+ is_change: false,
+ },
+ ].forEach( ( { initial, update, merge_index, is_change }, i ) =>
+ {
+ it( `is emitted only when data is changed (${i})`, () =>
+ {
+ const sut = Sut( createStubBucket() );
+ let called = false;
+
+ sut.setValues( initial, merge_index );
+
+ sut.on( 'preStagingUpdate', () => called = true );
+ sut.setValues( update, merge_index );
+
+ expect( called ).to.equal( is_change );
+ } );
+ } );
+ } );
+} );
+
+
+function createStubBucket( bucket_obj )
+{
+ return Class.implement( Bucket ).extend(
+ {
+ 'public getData'()
+ {
+ return bucket_obj;
+ },
+
+ 'public setValues'( data, merge_index, merge_null ) {},
+ 'public overwriteValues'( data ) {},
+ 'public clear'() {},
+ 'public each'( callback ) {},
+ 'public getDataByName'( name ) {},
+ 'public getDataJson'() {},
+ 'public filter'( pred, callback) {},
+ 'on'() {},
+ } )();
+}
diff --git a/test/validate/DataValidatorTest.js b/test/validate/DataValidatorTest.js
index e2364d3..ed9a932 100644
--- a/test/validate/DataValidatorTest.js
+++ b/test/validate/DataValidatorTest.js
@@ -54,14 +54,13 @@ describe( 'DataValidator', () =>
}
);
- const vmonitor = ValidStateMonitor();
- const dep_factory = createMockDependencyFactory();
+ const { sut, vmonitor, dep_factory, getStore } = createStubs( {
+ bvalidator: bvalidator,
+ } );
- const getStore = createStubStore();
const { bstore } = getStore();
- const mock_bstore = sinon.mock( bstore );
-
+ const mock_bstore = sinon.mock( bstore );
const mock_vmonitor = sinon.mock( vmonitor );
const mock_dep_factory = sinon.mock( dep_factory );
@@ -110,11 +109,7 @@ describe( 'DataValidator', () =>
second: { indexes: [ 0, 1 ], is: true },
};
- const bvalidator = createMockBucketValidator();
- const vmonitor = ValidStateMonitor();
- const dep_factory = createMockDependencyFactory();
-
- const getStore = createStubStore();
+ const { sut, getStore } = createStubs( {} );
const { cstore } = getStore();
const mock_cstore = sinon.mock( cstore );
@@ -124,7 +119,7 @@ describe( 'DataValidator', () =>
.once()
.returns( Promise.resolve( cstore ) );
- return Sut( bvalidator, vmonitor, dep_factory, getStore )
+ return sut
.validate( {}, classes )
.then( () =>
{
@@ -156,9 +151,9 @@ describe( 'DataValidator', () =>
}
);
- const vmonitor = ValidStateMonitor();
- const dep_factory = createMockDependencyFactory();
- const getStore = createStubStore();
+ const { sut, vmonitor, dep_factory, getStore } = createStubs( {
+ bvalidator: bvalidator,
+ } );
const diff = { foo: [ 'a', 'b', 'c' ] };
const expected_failures = {
@@ -196,9 +191,7 @@ describe( 'DataValidator', () =>
it( 'rejects if field monitor update rejects', () =>
{
- const bvalidator = createMockBucketValidator();
- const vmonitor = ValidStateMonitor();
- const dep_factory = createMockDependencyFactory();
+ const { sut, vmonitor } = createStubs( {} );
const expected_e = Error();
@@ -207,50 +200,158 @@ describe( 'DataValidator', () =>
.once()
.returns( Promise.reject( expected_e ) );
- return expect(
- Sut( bvalidator, vmonitor, dep_factory, createStubStore() )
- .validate( {} )
- ).to.eventually.be.rejectedWith( expected_e );
+ return expect( sut.validate( {} ) ).
+ to.eventually.be.rejectedWith( expected_e );
} );
- [
- [],
- [ {} ],
- [ undefined ],
- [ undefined, {} ],
- [ undefined, undefined ],
- [ {}, undefined ],
- ].forEach( args => it( 'does not re-use previous store state', () =>
+ // otherwise system might get into an unexpected state
+ it( 'queues concurrent validations', () =>
{
- const bvalidator = createMockBucketValidator();
- const vmonitor = ValidStateMonitor();
- const dep_factory = createMockDependencyFactory();
-
- const stores = {
- store: MemoryStore(),
- bstore: sinon.createStubInstance( MemoryStore ),
- cstore: sinon.createStubInstance( MemoryStore ),
- };
+ const expected_failure = {};
+
+ let vcalled = 0;
+
+ const bvalidator = createMockBucketValidator(
+ ( _, __, ___ ) => vcalled++
+ );
+
+ const vmonitor = sinon.createStubInstance( ValidStateMonitor );
- const { bstore, cstore } = stores;
+ const { sut, getStore } = createStubs( {
+ bvalidator: bvalidator,
+ vmonitor: vmonitor,
+ } );
- const cleared = which =>
+ const diff_a = { foo: [ 'a', 'b', 'c' ] };
+ const diff_b = { foo: [ 'd' ] };
+
+ const validatef = ( diff, failures ) =>
+ {
+ // not a real failure; just used to transfer state to stub
+ // (see below)
+ failures.failedon = diff;
+ };
+
+ return new Promise( ( accept, reject ) =>
{
- cleared[ which ] = true;
- return Promise.resolve();
+ // by the time it gets to this the second time, store could
+ // be in any sort of state depending on what callbacks were
+ // invoked first (implementation details)
+ vmonitor.update = ( _, failures ) =>
+ {
+ const orig_diff = failures.failedon;
+
+ // if the external validator was called twice, then they
+ // didn't wait for us to finish
+ if ( ( orig_diff === diff_a ) && ( vcalled !== 1 ) )
+ {
+ reject( Error( "Request not queued" ) );
+ }
+
+ // if this key doesn't exist, then the store has been
+ // cleared (which happens before it's re-populated with
+ // the new diff)
+ return expect( getStore().bstore.get( 'foo' ) )
+ .to.eventually.deep.equal( orig_diff.foo )
+ .then( () => {
+ // the second test, after which we're done
+ if ( orig_diff === diff_b )
+ {
+ accept();
+ }
+ } )
+ .catch( e => reject( e ) );
+ };
+
+ sut.validate( diff_a, {}, validatef );
+ sut.validate( diff_b, {}, validatef );
+ } );
+ } );
+ } );
+
+
+ describe( '#updateFailures', () =>
+ {
+ it( 'directly updates failures', () =>
+ {
+ const { sut, vmonitor, getStore } = createStubs( {} );
+ const { bstore } = getStore();
+
+ const diff = {
+ foo: [ 'bar' ],
+ bar: [ 'baz' ],
};
- bstore.clear = () => cleared( 'b' );
- cstore.clear = () => cleared( 'c' );
+ const failures = {};
- const sut = Sut( bvalidator, vmonitor, dep_factory, () => stores );
+ const mock_vmonitor = sinon.mock( vmonitor );
+ const mock_bstore = sinon.mock( bstore );
- return sut.validate.apply( sut, args )
+ // clears previous diffs
+ mock_bstore.expects( 'clear' )
+ .once()
+ .returns( Promise.resolve( bstore) );
+
+ mock_vmonitor
+ .expects( 'update' )
+ .once()
+ .withExactArgs( getStore().store, failures );
+
+ return sut
+ .updateFailures( diff, failures )
.then( () =>
- expect( cleared.b && cleared.c ).to.be.true
- );
- } ) );
+ {
+ mock_vmonitor.verify();
+ mock_bstore.verify();
+
+ // keep in mind that we are using MemoryStore for this
+ // test (whereas a real implementation would probably be
+ // using a DiffStore)
+ return Promise.all(
+ Object.keys( diff ).map( key =>
+ expect( bstore.get( key ) )
+ .to.eventually.deep.equal( diff[ key ] )
+ )
+ );
+ } );
+ } );
+
+
+ it( 'queues concurrent requests', () =>
+ {
+ const { sut, vmonitor, getStore } = createStubs( {
+ vmonitor: sinon.createStubInstance( ValidStateMonitor ),
+ } );
+ const { bstore } = getStore();
+
+ const faila = {};
+ const failb = {};
+
+ let running_first = true;
+
+ vmonitor.update = ( _, fail ) =>
+ {
+ if ( fail === failb )
+ {
+ if ( running_first === true )
+ {
+ return Promise.reject( Error(
+ "Request not queued"
+ ) );
+ }
+ }
+
+ return Promise.resolve( true );
+ };
+
+ return Promise.all( [
+ sut.updateFailures( {}, faila )
+ .then( () => running_first = false ),
+
+ sut.updateFailures( {}, failb ),
+ ] );
+ } );
} );
@@ -258,15 +359,8 @@ describe( 'DataValidator', () =>
{
it( 'proxies to validator', () =>
{
- const bvalidator = createMockBucketValidator();
- const vmonitor = ValidStateMonitor();
- const dep_factory = createMockDependencyFactory();
-
- const mock_vmonitor = sinon.mock( vmonitor );
-
- const sut = Sut(
- bvalidator, vmonitor, dep_factory, createStubStore()
- );
+ const { sut, vmonitor } = createStubs( {} );
+ const mock_vmonitor = sinon.mock( vmonitor );
const failures = [ 'foo', 'bar' ];
@@ -316,3 +410,20 @@ function createStubStore()
return () => stores;
}
+
+
+function createStubs( {
+ bvalidator = createMockBucketValidator(),
+ vmonitor = ValidStateMonitor(),
+ dep_factory = createMockDependencyFactory(),
+ getStore = createStubStore(),
+} )
+{
+ return {
+ bvalidator: bvalidator,
+ vmonitor: vmonitor,
+ dep_factory: dep_factory,
+ getStore: getStore,
+ sut: Sut( bvalidator, vmonitor, dep_factory, getStore ),
+ };
+}