-rw-r--r-- | src/bucket/DelayedRecursionError.js     |  26
-rw-r--r-- | src/bucket/DelayedStagingBucket.js      |  89
-rw-r--r-- | src/client/dapi/DataApiMediator.js      |   6
-rw-r--r-- | test/bucket/DelayedStagingBucketTest.js | 191
-rw-r--r-- | test/client/dapi/DataApiMediatorTest.js |  11
5 files changed, 306 insertions, 17 deletions
diff --git a/src/bucket/DelayedRecursionError.js b/src/bucket/DelayedRecursionError.js
new file mode 100644
index 0000000..9981c91
--- /dev/null
+++ b/src/bucket/DelayedRecursionError.js
@@ -0,0 +1,26 @@
+/**
+ * DelayedRecursionError
+ *
+ * Copyright (C) 2018 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+const { Class } = require( 'easejs' );
+
+
+module.exports = Class( 'DelayedRecursionError' )
+    .extend( Error, {} );
diff --git a/src/bucket/DelayedStagingBucket.js b/src/bucket/DelayedStagingBucket.js
index 0621797..a47bf5f 100644
--- a/src/bucket/DelayedStagingBucket.js
+++ b/src/bucket/DelayedStagingBucket.js
@@ -19,16 +19,19 @@
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */
 
-var Class = require( 'easejs' ).Class,
-    StagingBucket = require( './StagingBucket' );
+const { Class } = require( 'easejs' );
+const StagingBucket = require( './StagingBucket' );
+const DelayedRecursionError = require( './DelayedRecursionError' );
 
 
 /**
- * Holds changes until explicitly processed to avoid cascades
- *
- * Since each write could trigger any number of event listeners, writes
- * should be queued and done en-masse.
- */
+ * Holds changes until explicitly processed to avoid cascades
+ *
+ * Since each write could trigger any number of event listeners, writes
+ * should be queued and done en-masse.
+ *
+ * TODO: Convert to Trait.
+ */
 module.exports = Class( 'DelayedStagingBucket' )
     .extend( StagingBucket,
 {
@@ -44,6 +47,12 @@ module.exports = Class( 'DelayedStagingBucket' )
      */
     'private _timer': 0,
 
+    /**
+     * Whether #processValues is still on the stack
+     * @type {boolean}
+     */
+    'private _processing': 0,
+
 
     'public override setValues': function( data )
     {
@@ -73,8 +82,12 @@ module.exports = Class( 'DelayedStagingBucket' )
         // invoke when stack clears
         var _self = this;
 
-        this._timer = setTimeout( function()
+        this._timer = setTimeout( () =>
         {
+            // just in case something prevented us from decrementing the
+            // lock (e.g. an exception)
+            this._processing = 0;
+
             _self.processValues();
         }, 0 );
     },
@@ -107,7 +120,7 @@ module.exports = Class( 'DelayedStagingBucket' )
     {
         // if enqueued data is requested, then we have no choice but to merge to
         // ensure that the data is up-to-date
-        if ( this._queued[ name ] )
+        if ( this._queued[ name ] !== undefined )
         {
             this.processValues();
         }
@@ -154,14 +167,60 @@ module.exports = Class( 'DelayedStagingBucket' )
 
         // since additional data may be queued as a consequence of the below
         // set, prepare for it by providing an empty queue
-        var oldqueue = this._queued;
-        this._queued = {};
-        this._timer  = 0;
+        this._withProcessLock( () =>
+        {
+            var oldqueue = this._queued;
+            this._queued = {};
+            this._timer  = 0;
 
-        this.setValues['super'].call( this,
-            oldqueue, true, true
-        );
+            this.setValues['super'].call( this,
+                oldqueue, true, true
+            );
+        } );
 
         return this;
+    },
+
+
+    /**
+     * Increment lock for duration of function F
+     *
+     * The lock is incremented before calling F and decremented after F
+     * returns.  The maximum lock count is hard-coded to 5, after which an
+     * exception will be thrown.  The intent of this value is to provide a
+     * little bit of flexibility to hook implementations that allows for
+     * some gentle recursion, but preempts hooks when it looks like things
+     * may be going awry.
+     *
+     * @param {Function} f function to call
+     *
+     * @throws {DelayedRecursionError} when lock count exceeds maximum
+     *
+     * @return {undefined}
+     */
+    'private _withProcessLock'( f )
+    {
+        // protect against runaway recursion from hooks
+        if ( this._processing === 5 )
+        {
+            // throwing the exception will halt execution of the stack if
+            // uncaught, meaning _processing will never be fully
+            // decremented, so clear it out
+            this._processing = 0;
+
+            throw DelayedRecursionError(
+                "Recursion on DelayedStagingBucket#processValues " +
+                    "(likely caused by a bucket hook)"
+            );
+        }
+
+        // note that this isn't guarded with a try/catch, because we handle
+        // resetting the lock in a couple other places---this is more
+        // fool-proof, since catching an exception mail fail in certain
+        // circumstances (e.g. when reaching a resource limit, like
+        // stack/memory)
+        this._processing++;
+        f();
+        this._processing--;
     }
 } );
diff --git a/src/client/dapi/DataApiMediator.js b/src/client/dapi/DataApiMediator.js
index bdca195..89b600b 100644
--- a/src/client/dapi/DataApiMediator.js
+++ b/src/client/dapi/DataApiMediator.js
@@ -177,7 +177,11 @@ module.exports = Class( 'DataApiMediator',
 
         update[ name ] = field_update;
 
-        quote.setData( update );
+        // allow the stack to clear before setting data to allow any
+        // existing bucket processing to complete before hooks are kicked
+        // off yet again (which, in practice, could otherwise result in
+        // infinite recursion depending on what the hooks are doing)
+        setTimeout( () => quote.setData( update ) );
     },
 
 
diff --git a/test/bucket/DelayedStagingBucketTest.js b/test/bucket/DelayedStagingBucketTest.js
new file mode 100644
index 0000000..0349ca1
--- /dev/null
+++ b/test/bucket/DelayedStagingBucketTest.js
@@ -0,0 +1,191 @@
+/**
+ * Tests DelayedStagingBucket
+ *
+ * Copyright (C) 2018 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework
+ *
+ * Liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+'use strict';
+
+const { expect } = require( 'chai' );
+
+const {
+    DelayedStagingBucket: Sut,
+    DelayedRecursionError,
+} = require( '../../' ).bucket;
+
+
+describe( "DelayedStagingBucket", () =>
+{
+    it( "sets values after stack clears", done =>
+    {
+        const name  = 'foo';
+        const value = [ 'value' ];
+
+        // see end of this test
+        let ready = false;
+
+        const bucket = {
+            on() {},
+
+            setValues( given_data )
+            {
+                // set at the end of the test to ensure that this is
+                // done asynchronously
+                expect( ready ).to.be.true;
+
+                expect( given_data ).to.deep.equal( { [name]: value } );
+
+                done();
+            },
+
+            getData: () => ( {} ),
+        };
+
+        // queue set (should not yet call bucket#setValues)
+        const sut = Sut( bucket ).setValues( { [name]: value } );
+
+        // to write to the underlying bucket, we need to commit, but we
+        // have to do so after the async code runs (we should convert
+        // the SUT to a trait and avoid this madness)
+        setTimeout( () => sut.commit(), 0 );
+
+        // we're ready for the set to happen (once the stack clears)
+        ready = true;
+    } );
+
+
+    describe( "#getDataByName", () =>
+    {
+        [
+            {
+                label:     'processes immediately retrieving modified key',
+                name:      'foo',
+                get_name:  'foo',
+                immediate: true,
+            },
+            {
+                label:     'does not processes immediately retrieving unmodified key',
+                name:      'foo',
+                get_name:  'bar',
+                immediate: false,
+            },
+        ].forEach( ( { label, name, get_name, immediate } ) =>
+        {
+            it( label, () =>
+            {
+                const value = [ 'getDataByName value' ];
+
+                let called_set = false;
+
+                const bucket = {
+                    on() {},
+
+                    setValues( given_data )
+                    {
+                        called_set = ( given_data[ name ] !== undefined );
+                    },
+
+                    getData: () => ( {} ),
+                };
+
+                // queue set (should not yet call bucket#setValues)
+                const sut = Sut( bucket ).setValues( { [name]: value } );
+
+                // force processing of data by requesting same value (note that
+                // commit is necessary since the SUT extends StagingBucket)
+                sut.getDataByName( get_name );
+                sut.commit();
+
+                expect( called_set ).to.equal( immediate );
+            } );
+        } );
+
+
+        // Invoking processValues() writes to the underlying bucket which
+        // may cause hooks to be invoked, which in turn may cause the
+        // DelayedStagingBucket to be referenced, which would lead to
+        // infinite recursion.  This doesn't solve that problem, but it does
+        // provide a useful error in case it happens, and preempts the
+        // wasteful recursion which will exhaust the stack.
+        [
+            {
+                n:    1,
+                fail: false,
+            },
+            {
+                n:    3,
+                fail: false,
+            },
+            {
+                n:    5,
+                fail: true,
+            },
+            {
+                n:    7,
+                fail: true,
+            },
+        ].forEach( ( { n, fail } ) =>
+        {
+            it( `throws error on deeply recursive processValues (n=${n})`, () =>
+            {
+                const name  = 'recursive';
+                const value = [ 'getDataByName value' ];
+
+                let called_set = false;
+
+                const bucket = {
+                    on() {},
+                    setValues( given_data ) {},
+                    getData: () => ( {} ),
+                };
+
+                // queue set (should not yet call bucket#setValues)
+                const sut = Sut( bucket ).setValues( { [name]: value } );
+
+                let calln = 0;
+
+                // this hook will trigger the recursion (the set is required
+                // to start another timer; see #processValues)
+                sut.on( 'preStagingUpdate', () =>
+                {
+                    // stop recursing at our goal
+                    if ( calln++ === n )
+                    {
+                        return;
+                    }
+
+                    sut.setValues( { [name]: value } )
+                        .getDataByName( name )
+                } );
+
+                // force processing of data by requesting same value
+                if ( fail )
+                {
+                    expect( () => sut.getDataByName( name ) )
+                        .to.throw( DelayedRecursionError );
+                }
+                else
+                {
+                    expect( () => sut.getDataByName( name ) )
+                        .to.not.throw( Error );
+                }
+            } );
+        } );
+    } );
+} );
+
diff --git a/test/client/dapi/DataApiMediatorTest.js b/test/client/dapi/DataApiMediatorTest.js
index fee5f56..62e12b1 100644
--- a/test/client/dapi/DataApiMediatorTest.js
+++ b/test/client/dapi/DataApiMediatorTest.js
@@ -319,7 +319,8 @@ describe( "DataApiMediator", () =>
         {
             it( label, done =>
             {
-                let set_options = false;
+                let set_options   = false;
+                let stack_cleared = false;
 
                 const quote = {
                     getDataByName( given_name )
@@ -329,6 +330,9 @@ describe( "DataApiMediator", () =>
 
                     setData( given_data )
                     {
+                        // we should have allowed the stack to clear first
+                        expect( stack_cleared ).to.be.true;
+
                         expect( given_data ).to.deep.equal( expected );
 
                         // should have called setOptions by now
@@ -382,6 +386,11 @@ describe( "DataApiMediator", () =>
 
                 dapi_manager.emit(
                     'updateFieldData', name, index, val_label, results
                 );
+
+                // #setData should be triggered after the stack clears to
+                // #mitigate issues with hooks causing too much / infinite
+                // #recursion on the bucket on the same stack
+                stack_cleared = true;
             } );
         } );
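For readers unfamiliar with the guard introduced in _withProcessLock above, it reduces to a counted re-entrancy check. The standalone sketch below restates that pattern outside of easejs; the names LockRecursionError, MAX_DEPTH, and withLock are illustrative only and are not part of liza.

'use strict';

// Illustrative stand-ins for DelayedRecursionError and
// DelayedStagingBucket#_withProcessLock.
class LockRecursionError extends Error {}

const MAX_DEPTH = 5;  // same hard-coded limit as the patch above

let depth = 0;

// Run F, permitting limited re-entry.  As in _withProcessLock, the
// counter is reset *before* throwing so that a caught error does not
// leave the guard permanently engaged.
function withLock( f )
{
    if ( depth === MAX_DEPTH )
    {
        depth = 0;

        throw new LockRecursionError(
            "runaway recursion detected (likely caused by a hook)"
        );
    }

    depth++;
    f();
    depth--;
}

// a hook that keeps re-entering the guarded section is cut off after
// MAX_DEPTH levels rather than exhausting the stack
let calls = 0;

try
{
    const recurse = () => withLock( () => { calls++; recurse(); } );
    recurse();
}
catch ( e )
{
    console.log( `${e.constructor.name} after ${calls} calls` );
    // => LockRecursionError after 5 calls
}

The DataApiMediator change is the complementary half of the same fix: by deferring quote.setData with setTimeout, any processValues call already on the stack finishes before hooks can fire again, so a guard like the one above should only trip on genuinely runaway hooks.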