Mike Gerwitz

Activist for User Freedom

aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMike Gerwitz <mike.gerwitz@rtspecialty.com>2019-10-18 16:10:08 -0400
committerMike Gerwitz <mike.gerwitz@rtspecialty.com>2019-10-18 16:10:27 -0400
commit596944a4ea323c923dc6677b456f632218e02153 (patch)
treeec65998036b57cfca0de871a9359030677535b61
parent20ddff0f6e3752ac27f761ac2bab0d69b6208df0 (diff)
parentdecf6497deee20a1a61c9cec9cd3a90110c390ed (diff)
downloadliza-596944a4ea323c923dc6677b456f632218e02153.tar.gz
liza-596944a4ea323c923dc6677b456f632218e02153.tar.bz2
liza-596944a4ea323c923dc6677b456f632218e02153.zip
Server-side DataAPI request precedence and TypeScript introduction
This change was used as a guinea pig for introducing TypeScript, so there's a lot here. <<
-rw-r--r--.gitignore8
-rw-r--r--Makefile.am18
-rw-r--r--README.md2
-rw-r--r--doc/hacking.texi188
-rw-r--r--doc/server.texi3
-rw-r--r--npm-shrinkwrap.json93
-rw-r--r--package.json.in15
-rw-r--r--src/dapi/AutoRetry.js10
-rw-r--r--src/dapi/BucketDataApi.js2
-rw-r--r--src/dapi/DataApi.ts (renamed from src/dapi/DataApi.js)55
-rw-r--r--src/dapi/DataApiFactory.js4
-rw-r--r--src/dapi/DataApiManager.js2
-rw-r--r--src/dapi/QuoteDataApi.js2
-rw-r--r--src/dapi/RestDataApi.js2
-rw-r--r--src/dapi/RestrictedDataApi.js13
-rw-r--r--src/dapi/StaticAdditionDataApi.js11
-rw-r--r--src/dapi/format/JsonResponse.js4
-rw-r--r--src/dapi/format/ResponseApply.js4
-rw-r--r--src/dapi/http/HttpDataApi.js2
-rw-r--r--src/document/Document.ts37
-rw-r--r--src/error/ChainedError.ts73
-rw-r--r--src/error/ContextError.ts93
-rw-r--r--src/server/DocumentServer.js72
-rw-r--r--src/server/Server.js2
-rw-r--r--src/server/daemon/controller.js152
-rw-r--r--src/server/dapi/TokenedDataApi.ts168
-rw-r--r--src/server/request/DataProcessor.js4
-rw-r--r--src/server/request/ServerDataApiFactory.js50
-rw-r--r--src/server/service/TokenDao.js233
-rw-r--r--src/server/service/TokenedService.js144
-rw-r--r--src/server/token/MongoTokenDao.ts393
-rw-r--r--src/server/token/Token.ts115
-rw-r--r--src/server/token/TokenDao.ts204
-rw-r--r--src/server/token/UnknownTokenError.ts27
-rw-r--r--src/server/token/store/PersistentTokenStore.ts279
-rw-r--r--src/server/token/store/TokenStore.ts120
-rw-r--r--src/types/misc.d.ts56
-rw-r--r--src/types/mongodb.d.ts161
-rw-r--r--src/types/naugty.d.ts48
-rw-r--r--test/dapi/AutoRetryTest.js2
-rw-r--r--test/dapi/DummyDataApi.js4
-rw-r--r--test/dapi/format/JsonResponseTest.js2
-rw-r--r--test/error/ChainedErrorTest.ts67
-rw-r--r--test/error/ContextErrorTest.ts68
-rw-r--r--test/event/FieldVisibilityEventHandlerTest.js2
-rw-r--r--test/server/dapi/TokenedDataApiTest.ts242
-rw-r--r--test/server/service/RatingServiceSubmitNotifyTest.js2
-rw-r--r--test/server/token/MongoTokenDaoTest.ts531
-rw-r--r--test/server/token/store/PersistentTokenStoreTest.ts441
-rw-r--r--tsconfig.json20
50 files changed, 3751 insertions, 499 deletions
diff --git a/.gitignore b/.gitignore
index b7e2a1b..61aba11 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,6 +13,10 @@ src/version.js
Makefile
package.json
+# generated by Makefile
+src/.gitignore
+test/.gitignore
+
# generated by gen-index
src/**/index.js
@@ -22,3 +26,7 @@ src/**/index.js
# npm
node_modules
+
+# typescript
+tsconfig.tsbuildinfo
+
diff --git a/Makefile.am b/Makefile.am
index fe2c8e5..ab04456 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -1,6 +1,6 @@
## liza Makefile.am for automake
#
-# Copyright (C) 2014 R-T Specialty, LLC.
+# Copyright (C) 2010-2019 R-T Specialty, LLC.
#
# This file is part of liza.
#
@@ -24,6 +24,8 @@ path_test = $(top_builddir)/test
namespaces=$(shell find src -type d)
nsindex=$(addsuffix /index.js, $(namespaces))
+tsout = tsconfig.tsbuildinfo
+
SUBDIRS = doc
EXTRA_DIST = $(path_src) package.json index.js $(path_src)/version.js \
tools autogen.sh README.md $(path_test) \
@@ -35,12 +37,22 @@ all-am: modindex
dist-hook: check
+# used to force typescript compilation (it handles its own incremental
+# compilation and dependency discovery)
+$(tsout): FORCE $(path_src)/.gitignore $(path_test)/.gitignore
+ node_modules/.bin/tsc
+
modindex: $(nsindex)
-%/index.js: FORCE
+%/index.js: FORCE $(tsout)
./tools/gen-index "$*" > "$@"
+# ignore compiled JS files (this can be removed once all files are TS)
+%/.gitignore: FORCE
+ @echo "# THIS FILE IS GENERATED; DO NOT MODIFY!" > $@
+ ( cd $*/ && find . -name '*.ts' -printf "%P\n" | sed 's/\.ts$$/.js/' ) >> $@
+
test: check
-check:
+check: $(tsout)
PATH="$(PATH):$(CURDIR)/node_modules/mocha/bin" \
mocha @NODE_DESTRUCTURE@ \
--require $(path_test)/pre.js \
diff --git a/README.md b/README.md
index 4dcf9f3..9d626fa 100644
--- a/README.md
+++ b/README.md
@@ -39,8 +39,8 @@ If `configure` is not available, see the section "Configuring" above.
```
$ ./configure # see --help for optional arguments
- $ make # build
$ npm install # install js dependencies
+ $ make # build
$ make check # run test cases
```
diff --git a/doc/hacking.texi b/doc/hacking.texi
index c498f42..226899d 100644
--- a/doc/hacking.texi
+++ b/doc/hacking.texi
@@ -50,6 +50,7 @@ References for these topics and others are provided in
* Source Files:: Conventions for project files
* Libraries:: The few libraries used by Liza
* Developer Resources:: Where to look for more information
+* TypeScript Migration:: Information on migrating to TypeScript
@end menu
@@ -183,6 +184,8 @@ They further introduce maintenance obligations for keeping up with
@subsection System Libraries
@dnindex GNU ease.js
+Liza was originally developed using JavaScript
+ (first ECMAScript@tie{}3, and then ECMAScript@tie{}5).
JavaScript does not natively support the classical object-oriented
model familiar to users of more traditional classical
object-oriented languages like Java, C++, C#, and@tie{}PHP.
@@ -201,6 +204,14 @@ The @code{class} keyword introduced in ECMAScript@tie{}2015 is largely
primary concerns of ease.js,
nor does it provide traits.
+@dnindex TypeScript
+@emph{The project is now migrating toward TypeScript},
+ so new code should not use ease.js unless required
+ and an effort should be made to move existing code away from
+ ease.js.
+For more information on this migration,
+ see @xref{TypeScript Migration}.
+
@subsection Testing Libraries
@dnindex Mocha
@@ -214,6 +225,9 @@ Chai offers a few different styles of assertions (``should'',
``expect'', and ``assert'');
Liza uses @url{http://www.chaijs.com/guide/styles/#expect,``expect''}.
+@devnotice{A library to aid in mocking TypeScript classes needs to be
+ researched.}
+
@subsection UI Libraries
@dnindex jQuery
@@ -253,6 +267,18 @@ All developers should familiarize themselves with the resources
available on MDN so that they understand what type of information is
readily accessible for future reference.
+@dnindex TypeScript
+An overview of TypeScript can be found in its
+ @url{https://www.typescriptlang.org/docs/handbook/basic-types.html,Handbook}.
+The language borrows concepts from a number of others,
+ so many concepts may be familiar to you.
+TypeScript uses structural typing (duck typing).
+In Liza,
+ we also choose to implement nominal typing using ``branding''
+ (@srcrefraw{src/types/misc.d.ts}).
+A @url{https://github.com/microsoft/TypeScript/blob/master/doc/spec.md,language specification}
+ is also available.
+
@dnindex Node.js
The Server (@pxref{Server}) uses Node.js.
Although it's largely abstracted away,
@@ -285,3 +311,165 @@ Database operations in Liza are abstracted away,
For information on specific libraries used by Liza,
@pxref{Libraries}.
+
+
+@node TypeScript Migration
+@section TypeScript Migration
+@dnindex TypeScript
+@helpwanted{}
+
+This section contains notes regarding a migration to TypeScript.
+It is intended to serve as a guide@mdash{
+ }it is not prescriptive.
+
+
+@subsection Migrating Away From GNU ease.js
+Liza was originally written in @easejs.
+TypeScript now provides many features that ease.js was written to address,
+ though not all (most notably traits).
+
+Since ease.js was designed with JavaScript interoperability in mind,
+ and TypeScript generates prototypes from classes,
+ TypeScript classes serve as drop-in replacements under most
+ circumstances.
+However,
+ subtypes must be migrated at the same time as their parents,
+ otherwise type checking in TypeScript cannot properly be performed.
+If this is a concern,
+ @url{https://www.typescriptlang.org/docs/handbook/advanced-types.html#type-guards-and-type-assertions,type assertions}
+ can potentially be used to coerce types during a transition period
+ in conjunction with ease.js'
+ @url{https://www.gnu.org/software/easejs/manual/easejs.html#Type-Checks-and-Polymorphism,@samp{Class.isA}}.
+
+Interfaces do not exist at runtime in TypeScript,
+  but they do in ease.js.
+Consequently,
+ you can continue to export an ease.js interface while also exporting
+ a TypeScript interface.
+To do this,
+ continue to export using @samp{module.exports} rather than
+ TypeScript's @samp{export =}.
+
+ease.js implements stackable Scala-like traits.
+Traits are @emph{not} provided by TypeScript.
+Traits will therefore have to be refactored into,
+ for example,
+ decorators or strategies.
+
+
+@subsection Structural Typing
+@dnindex Typing, Duck
+@dnindex Typing, Structural
+TypeScript implements
+ @url{https://en.wikipedia.org/wiki/Structural_typing,structural typing},
+ also called duck typing.
+This means that any two types sharing the same ``shape'' are
+ compatible with one-another.
+
+For classes,
+ this can be mitigated by defining private members,
+ which then ensures that compatible types are indeed subtypes.
+
+Interfaces can be used in either the traditional OOP sense,
+ or as a means to define the shape of some arbitrary object.
+Since interfaces do not define implementation details,
+ the distinction isn't important@mdash{
+ }it does not matter if we receive an instance of an object
+ implementing an interface,
+    or some arbitrary object that just happens to adhere to it.
+
+In other instances where we want to distinguish between two values
+ with otherwise compatible APIs,
+  see Nominal Typing below.
+
+
+@subsection Nominal Typing
+@dnindex Typing, Nominal
+It is sometimes desirable to distinguish between two otherwise
+ compatible types.
+Consider, for example, a user@tie{}id and a Unix timestamp.
+Both are of type @code{number},
+ but it's desirable to ensure that one is not used where another is
+ expected.
+
+TypeScript doesn't directly support
+ @url{https://en.wikipedia.org/wiki/Nominal_typing,nominal typing},
+  where compatibility of data types is determined by name.
+Liza uses a convention called ``branding'',
+ abstracted behind a @code{NominalType} generic
+ (defined in @srcrefraw{src/types/misc.d.ts}).
+
+@float Figure, f:nom-type
+@verbatim
+type UnixTimestamp = NominalType<number, 'UnixTimestamp'>;
+type Milliseconds = NominalType<number, 'Milliseconds'>;
+
+function timeElapsed( start: UnixTimestamp, end: UnixTimestamp ): Milliseconds
+{
+ return end - start;
+}
+
+const start = <UnixTimestamp>1571325570000;
+const end = <UnixTimestamp>1571514320000;
+
+// this is okay
+const elapsed = timeElapsed( start, end );
+
+// this is not, since elapsed is of type Milliseconds
+timeElapsed( start, elapsed );
+
+@end verbatim
+@caption{Example of nominal typing}
+@end float
+
+Consider the example in @ref{f:nom-type}.
+Both @code{UnixTimestamp} and @code{Milliseconds} are a @code{number} type,
+ but they have been defined in such a way that their names are part
+ of the type definition.
+Not only does the compiler prevent bugs caused from mixing data,
+ but nominal types also help to make the code self-documenting.
+
+If you want to have self-documenting types @emph{without} employing
+ nominal typing,
+ use type aliases.
+
+There are no prescriptive rules for whether a type should be defined
+ nominally.
+
+In some cases,
+ it's useful to use nominal types after having validated data,
+ so that the compiler can enforce that assumption from that point forward.
+This can be done using
+ @url{https://www.typescriptlang.org/docs/handbook/advanced-types.html#type-guards-and-type-assertions,type assertions}.
+
+@float Figure, f:nom-assert
+@verbatim
+type PositiveInteger = NominalType<number, 'PositiveInteger'>;
+
+const isPositiveInteger = ( n: number ): n is PositiveInteger => n > 0;
+
+const lookupIndex = <T>( arr: T[], i: PositiveInteger ): T => arr[ i ];
+
+// untrusted input from the user
+const user_input = readSomeValue();
+
+if ( isPositiveInteger( user_input ) )
+{
+ // user_input is now of type PositiveInteger
+ return lookupIndex( data, user_input );
+}
+@end verbatim
+@caption{Validating nominal types}
+@end float
+
+In @ref{f:nom-assert} above,
+ we only assume something to be a @code{PositiveInteger} after having
+ checked its value.
+After that point,
+ we can use TypeScript's type system to ensure at compile time that
+ we are only using positive integers in certain contexts.
+
+@devnotice{Never cast values
+ (e.g. using @samp{<PositiveInteger>user_input})
+ when type predicates are provided,
+ since that undermines type safety.}
diff --git a/doc/server.texi b/doc/server.texi
index 2053194..b61984e 100644
--- a/doc/server.texi
+++ b/doc/server.texi
@@ -295,6 +295,9 @@ Therefore,
the entire response is mapped into the parent field;
defined return values are used only for filtering.
+When a DataAPI request is made,
+  it supersedes any previous requests that may still be pending for
+ that same index.
@node Encryption Service
diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json
index 4c1cc70..a476f7c 100644
--- a/npm-shrinkwrap.json
+++ b/npm-shrinkwrap.json
@@ -1,20 +1,105 @@
{
"name": "liza",
- "version": "3.9.5",
+ "version": "7.0.0",
"dependencies": {
+ "amqplib": {
+ "version": "0.5.3",
+ "from": "amqplib@0.5.3",
+ "resolved": "https://registry.npmjs.org/amqplib/-/amqplib-0.5.3.tgz",
+ "dependencies": {
+ "bitsyntax": {
+ "version": "0.1.0",
+ "from": "bitsyntax@>=0.1.0 <0.2.0",
+ "resolved": "https://registry.npmjs.org/bitsyntax/-/bitsyntax-0.1.0.tgz",
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "from": "debug@>=2.6.9 <2.7.0",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "dependencies": {
+ "ms": {
+ "version": "2.0.0",
+ "from": "ms@2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
+ }
+ }
+ }
+ }
+ },
+ "bluebird": {
+ "version": "3.5.5",
+ "from": "bluebird@>=3.5.2 <4.0.0",
+ "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.5.tgz"
+ },
+ "buffer-more-ints": {
+ "version": "1.0.0",
+ "from": "buffer-more-ints@>=1.0.0 <1.1.0",
+ "resolved": "https://registry.npmjs.org/buffer-more-ints/-/buffer-more-ints-1.0.0.tgz"
+ },
+ "readable-stream": {
+ "version": "1.1.14",
+ "from": "readable-stream@>=1.0.0 <2.0.0 >=1.1.9",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz",
+ "dependencies": {
+ "core-util-is": {
+ "version": "1.0.2",
+ "from": "core-util-is@>=1.0.0 <1.1.0",
+ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
+ },
+ "isarray": {
+ "version": "0.0.1",
+ "from": "isarray@0.0.1",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
+ },
+ "string_decoder": {
+ "version": "0.10.31",
+ "from": "string_decoder@>=0.10.0 <0.11.0",
+ "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"
+ },
+ "inherits": {
+ "version": "2.0.4",
+ "from": "inherits@>=2.0.1 <2.1.0",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz"
+ }
+ }
+ },
+ "safe-buffer": {
+ "version": "5.1.2",
+ "from": "safe-buffer@>=5.1.2 <5.2.0",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz"
+ },
+ "url-parse": {
+ "version": "1.4.7",
+ "from": "url-parse@>=1.4.3 <1.5.0",
+ "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.4.7.tgz",
+ "dependencies": {
+ "querystringify": {
+ "version": "2.1.1",
+ "from": "querystringify@>=2.1.1 <3.0.0",
+ "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.1.tgz"
+ },
+ "requires-port": {
+ "version": "1.0.0",
+ "from": "requires-port@>=1.0.0 <2.0.0",
+ "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz"
+ }
+ }
+ }
+ }
+ },
"easejs": {
"version": "0.2.9",
- "from": "easejs@>=0.2.0 <0.3.0",
+ "from": "https://registry.npmjs.org/easejs/-/easejs-0.2.9.tgz",
"resolved": "https://registry.npmjs.org/easejs/-/easejs-0.2.9.tgz"
},
"mongodb": {
"version": "1.2.14",
- "from": "mongodb@1.2.14",
+ "from": "https://registry.npmjs.org/mongodb/-/mongodb-1.2.14.tgz",
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-1.2.14.tgz",
"dependencies": {
"bson": {
"version": "0.1.8",
- "from": "bson@0.1.8",
+ "from": "https://registry.npmjs.org/bson/-/bson-0.1.8.tgz",
"resolved": "https://registry.npmjs.org/bson/-/bson-0.1.8.tgz"
}
}
diff --git a/package.json.in b/package.json.in
index e2a6b6e..29cb8ac 100644
--- a/package.json.in
+++ b/package.json.in
@@ -29,11 +29,16 @@
"amqplib": "0.5.3"
},
"devDependencies": {
- "chai": ">=1.9.1 < 4",
- "chai-as-promised": ">=6.0.0",
- "mocha": "5.2.0",
- "sinon": ">=1.17.4",
- "es6-promise": "~3"
+ "typescript": ">=3.6",
+ "@types/node": "~4.9",
+ "chai": ">=1.9.1 < 4",
+ "@types/chai": ">=1.9.1 < 4",
+ "chai-as-promised": "7.1.0",
+ "@types/chai-as-promised": "7.1.0",
+ "mocha": "5.2.0",
+ "@types/mocha": "5.2.0",
+ "sinon": ">=1.17.4",
+ "es6-promise": "~3"
},
"licenses": [
diff --git a/src/dapi/AutoRetry.js b/src/dapi/AutoRetry.js
index e584398..994ed13 100644
--- a/src/dapi/AutoRetry.js
+++ b/src/dapi/AutoRetry.js
@@ -121,9 +121,9 @@ module.exports = Trait( 'AutoRetry' )
*
* @return {DataApi} self
*/
- 'abstract override public request': function( input, callback )
+ 'abstract override public request': function( input, callback, id )
{
- this._try( input, callback, this._tries );
+ this._try( input, callback, id, this._tries );
return this;
},
@@ -141,7 +141,7 @@ module.exports = Trait( 'AutoRetry' )
*
* @return {undefined}
*/
- 'private _try': function( input, callback, n )
+ 'private _try': function( input, callback, id, n )
{
var _self = this;
@@ -178,10 +178,10 @@ module.exports = Trait( 'AutoRetry' )
( n - 1 ),
function()
{
- _self._try( input, callback, ( n - 1 ) );
+ _self._try( input, callback, id, ( n - 1 ) );
},
complete
);
- } );
+ }, id );
}
} );
diff --git a/src/dapi/BucketDataApi.js b/src/dapi/BucketDataApi.js
index 1be1167..81dcff0 100644
--- a/src/dapi/BucketDataApi.js
+++ b/src/dapi/BucketDataApi.js
@@ -66,7 +66,7 @@ module.exports = Class( 'BucketDataApi' )
*
* @return {BucketDataApi} self
*/
- 'public request': function( data, callback )
+ 'public request': function( data, callback, id )
{
var _self = this.__inst,
rows = [];
diff --git a/src/dapi/DataApi.js b/src/dapi/DataApi.ts
index e0d0a1b..9bd2c40 100644
--- a/src/dapi/DataApi.js
+++ b/src/dapi/DataApi.ts
@@ -19,16 +19,64 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-'use strict';
-
const { Interface } = require( 'easejs' );
/**
+ * Result of DataAPI call
+ *
+ * This seemingly pointless type exists to emphasize that the result of all
+ * DataAPI requests is and must be an array. Overlooking this has been the
+ * source of bugs in the past.
+ */
+export type DataApiResult = DataApiResultItem[];
+
+
+/**
+ * Individual item of DataAPI result
+ *
+ * Each result contains a set of key/value pairs. Usually, the value is a
+ * string or number, but more complex structures may be used server-side.
+ */
+export type DataApiResultItem = Record<string, any>;
+
+
+/**
+ * Inputs to the DataAPI
+ *
+ * Since data originate from the bucket, all values are expected to be
+ * strings.
+ */
+export type DataApiInput = Record<string, string>;
+
+
+/** Name of DataAPI */
+export type DataApiName = NominalType<string, 'DataApiName'>;
+
+
+/**
+ * Generic interface for data transmission
+ *
+ * This is to replace the below easejs interface; see TODO.
+ */
+export interface DataApi
+{
+ request(
+ data: DataApiInput,
+ callback: NodeCallback<DataApiResult>,
+ id: string,
+ ): this;
+}
+
+
+/**
* Provies a generic interface for data transmission. The only assumption that a
* user of this API shall make is that data may be sent and received in some
* arbitrary, implementation-defined format, and that every request for data
* shall yield some sort of response via a callback.
+ *
+ * TODO: Remove in favor of TypeScript interface (requires also converting
+ * subtypes)
*/
module.exports = Interface( 'DataApi',
{
@@ -46,8 +94,9 @@ module.exports = Interface( 'DataApi',
*
* @param {?Object<string,string>|string} data params or post data
* @param {function(?Error,*):string} callback continuation upon reply
+ * @param {string} id unique dapi identifier
*
* @return {DataApi} self
*/
- 'public request': [ 'data', 'callback' ]
+ 'public request': [ 'data', 'callback', 'id' ]
} );
diff --git a/src/dapi/DataApiFactory.js b/src/dapi/DataApiFactory.js
index f84ea32..68ba270 100644
--- a/src/dapi/DataApiFactory.js
+++ b/src/dapi/DataApiFactory.js
@@ -58,7 +58,7 @@ module.exports = Class( 'DataApiFactory',
const nonempty = !!descl.static_nonempty;
const multiple = !!descl.static_multiple;
- const api = this._createDataApi( type, descl, bucket );
+ const api = this.createDataApi( type, descl, bucket );
return RestrictedDataApi(
StaticAdditionDataApi( api, nonempty, multiple, static_data ),
@@ -93,7 +93,7 @@ module.exports = Class( 'DataApiFactory',
*
* @return {DataApi}
*/
- 'private _createDataApi'( type, desc, bucket )
+ 'virtual protected createDataApi'( type, desc, bucket )
{
const source = ( desc.source || '' );
const method = ( desc.method || '' );
diff --git a/src/dapi/DataApiManager.js b/src/dapi/DataApiManager.js
index c0b1fe4..7e5c7fe 100644
--- a/src/dapi/DataApiManager.js
+++ b/src/dapi/DataApiManager.js
@@ -202,7 +202,7 @@ module.exports = Class( 'DataApiManager' )
_self._pendingApiCall[ id ] = undefined;
_self.emit( 'fieldLoaded', name, +index );
}
- } ) )
+ }, id ) )
.catch( e => fc( e ) );
};
diff --git a/src/dapi/QuoteDataApi.js b/src/dapi/QuoteDataApi.js
index c5c8f3c..a8021fa 100644
--- a/src/dapi/QuoteDataApi.js
+++ b/src/dapi/QuoteDataApi.js
@@ -77,7 +77,7 @@ module.exports = Class( 'QuoteDataApi' )
*
* @return {DataApi} self
*/
- 'public request'( data, callback )
+ 'public request'( data, callback, id )
{
this._dapi.request( this.mapData( data ), callback );
},
diff --git a/src/dapi/RestDataApi.js b/src/dapi/RestDataApi.js
index 7f5e89a..f1e46cb 100644
--- a/src/dapi/RestDataApi.js
+++ b/src/dapi/RestDataApi.js
@@ -73,7 +73,7 @@ module.exports = Class( 'RestDataApi' )
*
* @return {RestDataApi} self
*/
- 'public request': function( data, callback )
+ 'public request': function( data, callback, id )
{
var _self = this.__inst;
diff --git a/src/dapi/RestrictedDataApi.js b/src/dapi/RestrictedDataApi.js
index 452bc36..c6e9427 100644
--- a/src/dapi/RestrictedDataApi.js
+++ b/src/dapi/RestrictedDataApi.js
@@ -85,7 +85,7 @@ module.exports = Class( 'RestrictedDataApi' )
*
* @return {DataApi} self
*/
- 'virtual public request': function( data, callback )
+ 'virtual public request': function( data, callback, id )
{
data = data || {};
callback = callback || function() {};
@@ -101,9 +101,9 @@ module.exports = Class( 'RestrictedDataApi' )
{
callback.call( _self,
err,
- _self._checkResponse( response, callback )
+ _self._checkResponse( err, response, callback )
);
- } );
+ }, id );
},
@@ -180,12 +180,13 @@ module.exports = Class( 'RestrictedDataApi' )
* any error events that may be emitted, allowing the handler to associate
* it with the original request and invoke it manually if necessary.
*
+ * @param {Error|null} err error, if any
* @param {Array.<Object>} response response data
* @param {Function} callback callback to be called with response
*
* @return {Object} original object if validations passed; otherwise {}
*/
- 'private _checkResponse': function( response, callback )
+ 'private _checkResponse': function( err, response, callback )
{
// the response should be an array; otherwise, we cannot process it to
// see if the return data is valid (since it would not be in the
@@ -193,10 +194,10 @@ module.exports = Class( 'RestrictedDataApi' )
// decorator should handle that job *before* the data gets to this one)
//
// since ES5 isn't an option, we'll stick with this dirty hack
- if ( !response || !( response.slice ) )
+ if ( err || !response || !( response.slice ) )
{
this.emit( 'error',
- TypeError( 'Response data is not an array' ),
+ err || TypeError( 'Response data is not an array' ),
callback,
response
);
diff --git a/src/dapi/StaticAdditionDataApi.js b/src/dapi/StaticAdditionDataApi.js
index cb1fc19..5f6cfa0 100644
--- a/src/dapi/StaticAdditionDataApi.js
+++ b/src/dapi/StaticAdditionDataApi.js
@@ -66,13 +66,6 @@ module.exports = Class( 'StaticAdditionDataApi' )
*/
__construct: function( data_api, nonempty, multiple, static_data )
{
- if ( !( Class.isA( DataApi, data_api ) ) )
- {
- throw Error(
- 'Expected object of type DataApi; given: ' + data_api
- );
- }
-
this._api = data_api;
this._static = static_data;
this._nonempty = !!nonempty;
@@ -88,7 +81,7 @@ module.exports = Class( 'StaticAdditionDataApi' )
*
* @return {DataApi} self
*/
- 'public request': function( data, callback )
+ 'public request': function( data, callback, id )
{
data = data || {};
callback = callback || function() {};
@@ -110,7 +103,7 @@ module.exports = Class( 'StaticAdditionDataApi' )
err,
_self._unshiftData( response )
);
- } );
+ }, id );
},
diff --git a/src/dapi/format/JsonResponse.js b/src/dapi/format/JsonResponse.js
index e2b75de..f736206 100644
--- a/src/dapi/format/JsonResponse.js
+++ b/src/dapi/format/JsonResponse.js
@@ -47,14 +47,14 @@ module.exports = Trait( 'JsonResponse' )
*
* @return {DataApi} self
*/
- 'virtual abstract override public request': function( data, callback )
+ 'virtual abstract override public request': function( data, callback, id )
{
var _self = this;
this.__super( data, function( err, resp )
{
_self._tryParse( err, resp, callback );
- } );
+ }, id );
return this;
},
diff --git a/src/dapi/format/ResponseApply.js b/src/dapi/format/ResponseApply.js
index 1c39d7e..17d6225 100644
--- a/src/dapi/format/ResponseApply.js
+++ b/src/dapi/format/ResponseApply.js
@@ -65,12 +65,12 @@ module.exports = Trait( 'ResponseApply' )
*
* @return {DataApi} self
*/
- 'virtual abstract override public request'( data, callback )
+ 'virtual abstract override public request'( data, callback, id )
{
this.__super( data, ( e, retdata ) =>
{
callback( e, this._dataf( retdata ) );
- } );
+ }, id );
return this;
},
diff --git a/src/dapi/http/HttpDataApi.js b/src/dapi/http/HttpDataApi.js
index 352b519..b99f551 100644
--- a/src/dapi/http/HttpDataApi.js
+++ b/src/dapi/http/HttpDataApi.js
@@ -128,7 +128,7 @@ module.exports = Class( 'HttpDataApi' )
*
* @throws {TypeError} on validation failure
*/
- 'virtual public request': function( data, callback )
+ 'virtual public request': function( data, callback, id )
{
// null is a good indicator of "I have no intent to send any data";
// empty strings and objects are not, since those are valid data
diff --git a/src/document/Document.ts b/src/document/Document.ts
new file mode 100644
index 0000000..0db893a
--- /dev/null
+++ b/src/document/Document.ts
@@ -0,0 +1,37 @@
+/**
+ * Document (quote) interface
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * The term "Quote" is synonymous with "Document"; this project is moving
+ * more toward the latter as it is further generalized.
+ */
+
+/**
+ * Document identifier
+ */
+export type DocumentId = NominalType<number, 'DocumentId'>;
+
+
+/**
+ * Quote (Document) id
+ *
+ * Where the term "Quote" is still used, this will allow for type
+ * compatibility and an easy transition.
+ */
+export type QuoteId = DocumentId;
diff --git a/src/error/ChainedError.ts b/src/error/ChainedError.ts
new file mode 100644
index 0000000..efe9b99
--- /dev/null
+++ b/src/error/ChainedError.ts
@@ -0,0 +1,73 @@
+/**
+ * Uniform error handling for Liza: error chaining
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import { ___Writable } from 'naughty';
+
+
+/**
+ * An Error augmented to include information about an underlying cause
+ *
+ * To create new chains, use the `chain` function.
+ *
+ * Chaining should be used when an error is caught and transformed into
+ * another, more specific error. By maintaining a reference to an existing
+ * error, context is not lost, which can be helpful for debugging and
+ * logging.
+ *
+ * Chains may be nested to an arbitrary depth, but because of the nature of
+ * JavaScript's errors, recursive chain type checks must be done at runtime.
+ */
+export interface ChainedError<T extends Error = Error> extends Error
+{
+ readonly chain: T,
+}
+
+
+/**
+ * Type predicate for `ChainedError`
+ *
+ * This predicate can be used at runtime to determine whether an error is
+ * chained.
+ *
+ * @param e error object
+ *
+ * @return whether `e` is of type ChainedError
+ */
+export const isChained = ( e: Error ): e is ChainedError =>
+ ( <ChainedError>e ).chain !== undefined;
+
+
+/**
+ * Chains two `Error`s
+ *
+ * This is intended to be used as if it were a constructor, where the first
+ * argument is a new `Error` instance.
+ *
+ * @param enew new error
+ * @param eprev error to chain
+ *
+ * @return `enew` with `eprev` chained
+ */
+export function chain( enew: Error, eprev: Error ): ChainedError
+{
+ ( <___Writable<ChainedError>>enew ).chain = eprev;
+ return <ChainedError>enew;
+}
diff --git a/src/error/ContextError.ts b/src/error/ContextError.ts
new file mode 100644
index 0000000..1286a8b
--- /dev/null
+++ b/src/error/ContextError.ts
@@ -0,0 +1,93 @@
+/**
+ * Uniform error handling for Liza: error context
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * This context system is intended to play nicely with how Error objects are
+ * typically used in JavaScript. As such, rather than creating new error
+ * prototypes / subclasses, these rely on structural typing to augment
+ * existing `Error` objects.
+ */
+
+import { ___Writable } from 'naughty';
+
+
+/**
+ * Error with additional context regarding its cause
+ *
+ * Errors may be augmented with key/value data (see `ErrorContext`)
+ * containing data that will be helpful for debugging the cause of the
+ * error. The context should be expected to appear in structured logs, so
+ * it shouldn't include sensitive data without some mitigation layer.
+ *
+ * A context may be optionally typed, but note that such context will
+ * generally be lost any time promises are used, so type predicates will
+ * need to be used to restore the type with information at runtime.
+ *
+ * Since the context is intended primarily for debugging, it shouldn't be
+ * relied on to drive control flow unless absolutely necessary, in which
+ * case an explicit context should be used.
+ */
+export interface ContextError<T extends ErrorContext = ErrorContext>
+ extends Error
+{
+ readonly context: T,
+}
+
+
+/**
+ * Key/value context for an error
+ *
+ * Rather than accepting data of an arbitrary type, we force key/value for
+ * reasons of extensibility and consistency: if more information is needed
+ * in the future, the type will remain unchanged. The values, however, may
+ * include any arbitrary data.
+ */
+export type ErrorContext = { readonly [P: string]: any };
+
+
+/**
+ * Type predicate for `ContextError`
+ *
+ * Note that this is a predicate for a generic `ContextError` type, which
+ * is equivalent to `ContextError<ErrorContext>`. Other contexts must
+ * define their own predicates.
+ *
+ * @param e error object
+ *
+ * @return whether `e` is of type `ContextError`
+ */
+export const hasContext = ( e: Error ): e is ContextError =>
+ ( <ContextError>e ).context !== undefined;
+
+
+/**
+ * Adds context to an error
+ *
+ * This is intended to be used as if it were a constructor, where the first
+ * argument is a new `Error` instance.
+ *
+ * @param enew error object to add context to
+ * @param context key/value context information
+ */
+export function context<T = ErrorContext>( enew: Error, context: T ):
+ ContextError<T>
+{
+ ( <___Writable<ContextError<T>>>enew ).context = context;
+ return <ContextError<T>>enew;
+}
diff --git a/src/server/DocumentServer.js b/src/server/DocumentServer.js
index 8370258..c8abb4a 100644
--- a/src/server/DocumentServer.js
+++ b/src/server/DocumentServer.js
@@ -48,17 +48,40 @@ const {
JsonServerResponse,
ServerDataApiFactory,
},
+
+ token: {
+ MongoTokenDao: { MongoTokenDao },
+ },
},
} = require( '../..' );
/**
* Vanilla document server
+ *
+ * XXX: This is a mess, and it's only getting worse with dependencies
+ * instantiated everywhere. Everything should be instantiated in one place
+ * rather than part of them being passed in here. See controller.js.
*/
module.exports = Class( 'DocumentServer',
{
- 'public create': ( dao, logger, enc_service, origin_url, conf ) =>
- Promise.all( [
+ /**
+ * Create document server
+ *
+ * See above XXX.
+ *
+ * @param {MongoServerDao} dao server DAO
+ * @param {Logger} logger log manager
+ * @param {EncryptionService} enc_service encryption service
+ * @param {string} origin_url HTTP_ORIGIN_URL
+ * @param {ConfStore} conf configuration store
+ * @param {MongoConnection} collection database collection
+ *
+ * @return {Promise<Server>}
+ */
+ 'public create'( dao, logger, enc_service, origin_url, conf, collection )
+ {
+ return Promise.all( [
conf.get( 'dapi' ),
] ).then( ([ dapi_conf ]) => Server(
new JsonServerResponse.create(),
@@ -68,17 +91,46 @@ module.exports = Class( 'DocumentServer',
DataProcessor(
bucket_filter,
- ( apis, request ) => DataApiManager(
- ServerDataApiFactory(
- origin_url || request.getOrigin(),
- request,
- dapi_conf
- ),
- apis
+ ( apis, request, quote ) => this._createDapiManager(
+ apis, request, origin_url, dapi_conf, quote, collection
),
DapiMetaSource( QuoteDataBucket ),
StagingBucket
),
ProgramInit()
- ) )
+ ) );
+ },
+
+
+ /**
+ * Create new DataApiManager
+ *
+ * See above XXX.
+ *
+ * @param {Object} apis API definitions
+ * @param {Request} request Node HTTP request
+ * @param {string} origin_url HTTP_ORIGIN_URL
+ * @param {Object} dapi_conf dapi configuration
+ * @param {Quote} quote current quote for request
+ * @param {MongoConnection} collection database collection
+ */
+ 'private _createDapiManager'(
+ apis, request, origin_url, dapi_conf, quote, collection
+ )
+ {
+ return DataApiManager(
+ ServerDataApiFactory(
+ origin_url || request.getOrigin(),
+ request,
+ dapi_conf,
+ quote.getId(),
+ new MongoTokenDao(
+ collection,
+ 'dapitok',
+ () => Math.floor( ( new Date() ).getTime() / 1000 )
+ )
+ ),
+ apis
+ );
+ },
} );
diff --git a/src/server/Server.js b/src/server/Server.js
index 6592faf..8a42ab0 100644
--- a/src/server/Server.js
+++ b/src/server/Server.js
@@ -1148,7 +1148,7 @@ module.exports = Class( 'Server' )
const { filtered, dapis, meta_clear } =
server._dataProcessor.processDiff(
- parsed_data, request, program, bucket
+ parsed_data, request, program, bucket, quote
);
server._monitorMetadataPromise( quote, dapis, meta_clear );
diff --git a/src/server/daemon/controller.js b/src/server/daemon/controller.js
index 680fde7..7e8ca54 100644
--- a/src/server/daemon/controller.js
+++ b/src/server/daemon/controller.js
@@ -87,7 +87,12 @@ const {
RatingServicePublish,
RatingServiceSubmitNotify,
TokenedService,
- TokenDao,
+ },
+
+ token: {
+ MongoTokenDao: {
+ MongoTokenDao
+ },
},
request: {
@@ -122,47 +127,50 @@ exports.init = function( logger, enc_service, conf )
var db = _createDB( logger );
const dao = MongoServerDao( db );
- _createDocumentServer( dao, logger, enc_service, conf ).then( srv =>
+ db.collection( 'quotes', function( err, collection )
{
- server = srv;
+ _createDocumentServer( dao, logger, enc_service, conf, collection ).then( srv =>
+ {
+ server = srv;
- server_cache = _createCache( server );
- server.init( server_cache, exports.rater );
+ server_cache = _createCache( server );
+ server.init( server_cache, exports.rater );
- // TODO: temporary proof-of-concept
- rating_service = RatingService.use(
- RatingServicePublish( amqplib, exports.post_rate_publish, logger )
- )(
- logger, dao, server, exports.rater
- );
-
- // TODO: exports.init needs to support callbacks; this will work, but
- // only because it's unlikely that we'll get a request within
- // milliseconds of coming online
- _initExportService( db, function( service )
- {
- c1_export_service = service;
- } );
+ // TODO: temporary proof-of-concept
+ rating_service = RatingService.use(
+ RatingServicePublish( amqplib, exports.post_rate_publish, logger )
+ )(
+ logger, dao, server, exports.rater
+ );
- server.on( 'quotePverUpdate', function( quote, program, event )
- {
- // let them know that we're going to be a moment
- var c = event.wait();
+ // TODO: exports.init needs to support callbacks; this will work, but
+ // only because it's unlikely that we'll get a request within
+ // milliseconds of coming online
+ _initExportService( collection, function( service )
+ {
+ c1_export_service = service;
+ } );
- getCleaner( program ).clean( quote, function( err )
+ server.on( 'quotePverUpdate', function( quote, program, event )
{
- // report on our success/failure
- if ( err )
- {
- event.bad( err );
- }
- else
+ // let them know that we're going to be a moment
+ var c = event.wait();
+
+ getCleaner( program ).clean( quote, function( err )
{
- event.good();
- }
+ // report on our success/failure
+ if ( err )
+ {
+ event.bad( err );
+ }
+ else
+ {
+ event.good();
+ }
- // we're done
- c();
+ // we're done
+ c();
+ } );
} );
} );
} );
@@ -198,7 +206,7 @@ function _createDB( logger )
return db;
}
-function _createDocumentServer( dao, logger, enc_service, conf )
+function _createDocumentServer( dao, logger, enc_service, conf, collection )
{
const origin_url = process.env.HTTP_ORIGIN_URL || '';
@@ -214,52 +222,52 @@ function _createDocumentServer( dao, logger, enc_service, conf )
}
return DocumentServer()
- .create( dao, logger, enc_service, origin_url, conf );
+ .create( dao, logger, enc_service, origin_url, conf, collection );
}
-function _initExportService( db, callback )
+function _initExportService( collection, callback )
{
- db.collection( 'quotes', function( err, collection )
- {
- if ( collection === null )
- {
- return;
- }
+ var spoof_host = (
+ ''+(
+ process.env.C1_EXPORT_HOST
+ || process.env.LV_RATE_DOMAIN
+ || process.env.LV_RATE_HOST
+ ).trim()
+ );
- var spoof_host = (
- ''+(
- process.env.C1_EXPORT_HOST
- || process.env.LV_RATE_DOMAIN
- || process.env.LV_RATE_HOST
- ).trim()
- );
+ var spoof = SessionSpoofHttpClient( http, spoof_host );
- var spoof = SessionSpoofHttpClient( http, spoof_host );
+ callback(
+ ExportService
+ .use( TokenedService(
+ 'c1import',
+ new MongoTokenDao( collection, "exports", getUnixTimestamp ),
+ function tokgen()
+ {
+ var shasum = crypto.createHash( 'sha1' );
+ shasum.update( ''+Math.random() );
- callback(
- ExportService
- .use( TokenedService(
- 'c1import',
- TokenDao( collection ),
- function tokgen()
- {
- var shasum = crypto.createHash( 'sha1' );
- shasum.update( ''+Math.random() );
+ return shasum.digest( 'hex' );
+ },
+ function newcapturedResponse( request, callback )
+ {
+ return UserResponse
+ .use( CapturedUserResponse( callback ) )
+ ( request );
+ }
+ ) )
+ ( spoof )
+ );
+}
- return shasum.digest( 'hex' );
- },
- function newcapturedResponse( request, callback )
- {
- return UserResponse
- .use( CapturedUserResponse( callback ) )
- ( request );
- }
- ) )
- ( spoof )
- );
- } );
+/**
+ * Retrieve current date as a Unix timestamp
+ */
+function getUnixTimestamp()
+{
+ return Math.floor( ( new Date() ).getTime() / 1000 );
}
diff --git a/src/server/dapi/TokenedDataApi.ts b/src/server/dapi/TokenedDataApi.ts
new file mode 100644
index 0000000..85d4a25
--- /dev/null
+++ b/src/server/dapi/TokenedDataApi.ts
@@ -0,0 +1,168 @@
+/**
+ * DataAPI backed by tokens for logging and precedence
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import { DataApi, DataApiInput, DataApiResult } from "../../dapi/DataApi";
+import { TokenStore } from "../token/store/TokenStore";
+import { Token, TokenState, TokenNamespace } from "../token/Token";
+import { context } from "../../error/ContextError";
+
+
+/** Token store constructor/factory */
+type TokenStoreCtor = ( ns: TokenNamespace ) => TokenStore;
+
+
+/**
+ * Wrap DataAPI request in a token
+ *
+ * If another request is made before the first finishes, then the first will
+ * return in error stating that it has been superceded. Under normal
+ * circumstances, this otherwise acts like a typical DataAPI, with the
+ * side-effect of having tokens created and replies logged.
+ *
+ * TODO: log inputs to token as data?
+ */
+export class TokenedDataApi implements DataApi
+{
+ /**
+ * Wrap DataAPI
+ *
+ * The provided DataAPI will be wrapped such that requests will have
+ * tokens created, namespaced to the id of the request. A token store
+ * will be created using the provided `_tstoreCtor` for each such id.
+ *
+ * @param _api - DataAPI to decorate
+ * @param _tstoreCtor - `TokenStore` constructor by namespace
+ */
+ constructor(
+ private readonly _api: DataApi,
+ private readonly _tstoreCtor: TokenStoreCtor
+ ) {}
+
+
+ /**
+ * Perform request and generate corresponding token
+ *
+ * A token is created before each request using a store initialized to a
+ * namespace identified by `id`. If a token associated with a request
+ * is still the most recently created token for that namespace by the
+ * time the request completes, then the request is fulfilled as
+ * normal. But if another request has since been made in the same
+ * namespace, then the request is considered to be superseded, and is
+ * rejected in error.
+ *
+ * The token will be completed in either case so that there is a log of
+ * the transaction.
+ *
+ * @param data - request data
+ * @param callback - success/failure callback
+ * @param id - unique dapi identifier
+ *
+ * @return self
+ */
+ request(
+ data: DataApiInput,
+ callback: NodeCallback<DataApiResult>,
+ id: string
+ ): this
+ {
+ const store = this._tstoreCtor( <TokenNamespace>id );
+
+ // TODO: we should probably store raw data rather than converting it
+ // to JSON
+ store.createToken().then( token =>
+ this._dapiRequest( data, id ).then( resp_data =>
+ store.completeToken( token, JSON.stringify( resp_data ) )
+ .then( newtok =>
+ this._replyUnlessStale(
+ store, newtok, resp_data, callback, id
+ )
+ )
+ )
+ )
+ .catch( e => callback( e, null ) );
+
+ return this;
+ }
+
+
+ /**
+ * Wrap underlying DataAPI request in a Promise
+ *
+ * The `DataApi` interface still uses the oldschool Node
+ * callbacks. This lifts it into a Promise.
+ *
+ * @param data - request data
+ * @param id - DataAPI id
+ *
+ * @return request as a Promise
+ */
+ private _dapiRequest( data: DataApiInput, id: string ): Promise<DataApiResult>
+ {
+ return new Promise( ( resolve, reject ) =>
+ {
+ this._api.request( data, ( e, resp_data ) =>
+ {
+ if ( e || resp_data === null )
+ {
+ return reject( e );
+ }
+
+ resolve( resp_data );
+ }, id );
+ } );
+ }
+
+
+ /**
+ * Invoke callback successfully with data unless the request is stale
+ *
+ * A request is stale/superseded if it is not the most recently created
+ * token for the namespace, implying that another request has since
+ * taken place.
+ *
+ * @param newtok - completed token
+ * @param resp_data - response data from underlying DataAPI
+ * @param callback - success/failure callback
+ * @param id - DataApi id
+ */
+ private _replyUnlessStale(
+ store: TokenStore,
+ newtok: Token<TokenState.DONE>,
+ resp_data: DataApiResult,
+ callback: NodeCallback<DataApiResult>,
+ id: string
+ ): Promise<void>
+ {
+ if ( newtok.last_created )
+ {
+ return store.acceptToken( newtok, null )
+ .then( () => callback( null, resp_data ) );
+ }
+
+ return store.killToken( newtok, null ).then( () => callback(
+ context(
+ Error( "Request superceded" ),
+ { id: id },
+ ),
+ null
+ ) );
+ }
+}
diff --git a/src/server/request/DataProcessor.js b/src/server/request/DataProcessor.js
index a746be3..3d9bbf6 100644
--- a/src/server/request/DataProcessor.js
+++ b/src/server/request/DataProcessor.js
@@ -87,10 +87,10 @@ module.exports = Class( 'DataProcessor',
*
* @return {Object} processed diff
*/
- 'public processDiff'( data, request, program, bucket )
+ 'public processDiff'( data, request, program, bucket, quote )
{
const filtered = this.sanitizeDiff( data, request, program );
- const dapi_manager = this._dapif( program.apis, request );
+ const dapi_manager = this._dapif( program.apis, request, quote );
const staging = this._stagingCtor( bucket );
// forbidBypass will force diff generation on initQuote
diff --git a/src/server/request/ServerDataApiFactory.js b/src/server/request/ServerDataApiFactory.js
index 087d70e..522fb0c 100644
--- a/src/server/request/ServerDataApiFactory.js
+++ b/src/server/request/ServerDataApiFactory.js
@@ -20,6 +20,8 @@
*/
const { Class } = require( 'easejs' );
+const crypto = require( 'crypto' );
+
const {
dapi: {
DataApiFactory,
@@ -31,6 +33,16 @@ const {
store: {
StoreMissError,
},
+ server: {
+ dapi: {
+ TokenedDataApi: { TokenedDataApi },
+ },
+ token: {
+ store: {
+ PersistentTokenStore: { PersistentTokenStore },
+ },
+ },
+ },
} = require( '../..' );
@@ -58,12 +70,48 @@ module.exports = Class( 'ServerDataApiFactory' )
*/
'private _conf': null,
+ /**
+ * Document (quote) id
+ * @type {DocumentId}
+ */
+ 'private _doc_id': 0,
+
- constructor( origin, session, conf )
+ constructor( origin, session, conf, doc_id, tokdao )
{
this._origin = ''+origin;
this._session = session;
this._conf = conf;
+ this._doc_id = doc_id;
+ this._tokdao = tokdao;
+ },
+
+
+ 'override protected createDataApi'( type, desc, bucket )
+ {
+ return new TokenedDataApi(
+ this.__super( type, desc, bucket ),
+ token_ns => new PersistentTokenStore(
+ this._tokdao,
+ this._doc_id,
+ token_ns,
+ () => this._generateTokenId()
+ )
+ );
+ },
+
+
+ /**
+ * Generate random token identifier
+ *
+ * @return {string} unique token identifier
+ */
+ 'private _generateTokenId'()
+ {
+ var shasum = crypto.createHash( 'sha1' );
+ shasum.update( ''+Math.random() );
+
+ return shasum.digest( 'hex' );
},
diff --git a/src/server/service/TokenDao.js b/src/server/service/TokenDao.js
deleted file mode 100644
index 09de75e..0000000
--- a/src/server/service/TokenDao.js
+++ /dev/null
@@ -1,233 +0,0 @@
-/**
- * Token state management
- *
- * Copyright (C) 2010-2019 R-T Specialty, LLC.
- *
- * This file is part of the Liza Data Collection Framework.
- *
- * liza is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as
- * published by the Free Software Foundation, either version 3 of the
- * License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-var Class = require( 'easejs' ).Class;
-
-
-/**
- * Manages token updates
- *
- * Note that this is tightly coupled with MongoDB.
- */
-module.exports = Class( 'TokenDao',
-{
- /**
- * @type {MongoCollection} mongo database collection
- */
- 'private _collection': null,
-
-
- /**
- * Initialize connection
- *
- * @param {MongoCollection} collection token Mongo collection
- */
- 'public __construct': function( collection )
- {
- this._collection = collection;
- },
-
-
- /**
- * Create or update a token record
- *
- * The token entry is entered in the token log, and then the current
- * entry is updated to reflect the changes. The operation is atomic.
- *
- * @param {number} quote_id unique quote identifier
- * @param {string} ns token namespace
- * @param {string} token token value
- * @param {string} data token data, if any
- * @param {string} status arbitrary token type
- *
- * @param {function(*)} callback with error or null (success)
- *
- * @return {TokenDao} self
- */
- 'public updateToken': function( quote_id, ns, token, type, data, callback )
- {
- var token_data = {},
- token_log = {},
- root = this._genRoot( ns ) + '.',
- current_ts = Math.floor( ( new Date() ).getTime() / 1000 );
-
- var token_entry = {
- type: type,
- timestamp: current_ts,
- };
-
- if ( data )
- {
- token_entry.data = data;
- }
-
- token_data[ root + 'last' ] = token;
- token_data[ root + 'lastStatus' ] = token_entry;
- token_data[ root + token + '.status' ] = token_entry;
-
- token_log[ root + token + '.statusLog' ] = token_entry;
-
- this._collection.update(
- { id: +quote_id },
- {
- $set: token_data,
- $push: token_log
- },
- { upsert: true },
-
- function ( err, docs )
- {
- callback( err || null );
- }
- );
-
- return this;
- },
-
-
- /**
- * Retrieve existing token under the namespace NS, if any, for the quote
- * identified by QUOTE_ID
- *
- * If a TOKEN_ID is provided, only that token will be queried; otherwise,
- * the most recently created token will be the subject of the query.
- *
- * @param {number} quote_id quote identifier
- * @param {string} ns token namespace
- * @param {string} token_id token identifier (unique to NS)
- *
- * @param {function(?Error,{{id: string, status: string}})} callback
- *
- * @return {TokenDao} self
- */
- 'public getToken': function( quote_id, ns, token_id, callback )
- {
- var _self = this;
-
- var root = this._genRoot( ns ) + '.',
- fields = {};
-
- fields[ root + 'last' ] = 1;
- fields[ root + 'lastStatus' ] = 1;
-
- if ( token_id )
- {
- // XXX: injectable
- fields[ root + token_id ] = 1;
- }
-
- this._collection.findOne(
- { id: +quote_id },
- { fields: fields },
- function( err, data )
- {
- if ( err )
- {
- callback( err, null );
- return;
- }
-
- if ( !data || ( data.length === 0 ) )
- {
- callback( null, null );
- return;
- }
-
- var exports = data.exports || {},
- ns_data = exports[ ns ] || {};
-
- callback(
- null,
- ( token_id )
- ? _self._getRequestedToken( token_id, ns_data )
- : _self._getLatestToken( ns_data )
- );
- }
- );
-
- return this;
- },
-
-
- /**
- * Retrieve latest token data, or `null` if none
- *
- * @param {{last: string, lastStatus: string}} ns_data namespace data
- *
- * @return {?{{id: string, status: string}}} data of latest token in
- * namespace
- */
- 'private _getLatestToken': function( ns_data )
- {
- var last = ns_data.last;
-
- if ( !last )
- {
- return null;
- }
-
- return {
- id: last,
- status: ns_data.lastStatus,
- };
- },
-
-
- /**
- * Retrieve latest token data, or `null` if none
- *
- * @param {string} token_id token identifier for namespace associated
- * with NS_DATA
- *
- * @param {{last: string, lastStatus: string}} ns_data namespace data
- *
- * @return {?{{id: string, status: string}}} data of requested token
- */
- 'private _getRequestedToken': function( token_id, ns_data )
- {
- var reqtok = ns_data[ token_id ];
-
- if ( !reqtok )
- {
- return null;
- }
-
- return {
- id: token_id,
- status: reqtok.status,
- };
- },
-
-
- /**
- * Determine token root for the given namespace
- *
- * @param {string} ns token namespace
- *
- * @return {string} token root for namespace NS
- */
- 'private _genRoot': function( ns )
- {
- // XXX: injectable
- return 'exports.' + ns;
- },
-} );
-
diff --git a/src/server/service/TokenedService.js b/src/server/service/TokenedService.js
index 6cb86f5..9d805cf 100644
--- a/src/server/service/TokenedService.js
+++ b/src/server/service/TokenedService.js
@@ -21,8 +21,7 @@
var Trait = require( 'easejs' ).Trait,
Class = require( 'easejs' ).Class,
- Service = require( './Service' ),
- TokenDao = require( './TokenDao' );
+ Service = require( './Service' );
/**
@@ -96,11 +95,6 @@ module.exports = Trait( 'TokenedService' )
*/
__mixin: function( namespace, dao, tokgen, capture_gen )
{
- if ( !Class.isA( TokenDao, dao ) )
- {
- throw TypeError( 'Instance of TokenDao expected' );
- }
-
if ( typeof tokgen !== 'function' )
{
throw TypeError( 'Token generator must be a function' );
@@ -242,31 +236,9 @@ module.exports = Trait( 'TokenedService' )
*/
'private _getQuoteToken': function( quote, tokid, callback )
{
- this._dao.getToken(
- quote.getId(),
- this._ns,
- tokid,
- function( err, token )
- {
- if ( err )
- {
- callback( err, null );
- return;
- }
-
- if ( tokid && !token )
- {
- callback(
- Error( "Token not found: " + tokid ),
- null
- );
-
- return;
- }
-
- callback( null, token );
- }
- );
+ this._dao.getToken( quote.getId(), this._ns, tokid )
+ .then( token => callback( null, token ) )
+ .catch( err => callback( err, null ) );
},
@@ -595,29 +567,9 @@ module.exports = Trait( 'TokenedService' )
var tokid = this._tokgen( program, quote ),
status = this.getDefaultTokenStatus();
- this._dao.updateToken(
- quote.getId(),
- this._ns,
- tokid,
- status,
- null,
- function( err )
- {
- if ( err )
- {
- callback( err, null );
- return;
- }
-
- callback(
- null,
- {
- id: tokid,
- status: status,
- }
- );
- }
- );
+ this._dao.updateToken( quote.getId(), this._ns, tokid, status, null )
+ .then( () => callback( null, { id: tokid, status: status } ) )
+ .catch( err => callback( err, null ) );
},
@@ -642,33 +594,13 @@ module.exports = Trait( 'TokenedService' )
*
* @param {function(?Error,Object)} callback continuation
*/
- 'virtual virtual protected killToken': function( quote, token, callback )
+ 'virtual protected killToken': function( quote, token, callback )
{
callback = callback || function() {};
- this._dao.updateToken(
- quote.getId(),
- this._ns,
- token.id,
- 'DEAD',
- null,
- function( err )
- {
- if ( err )
- {
- callback( err, null );
- return;
- }
-
- callback(
- null,
- {
- id: token,
- status: 'DEAD',
- }
- );
- }
- );
+ this._dao.updateToken( quote.getId(), this._ns, token.id, 'DEAD', null )
+ .then( () => callback( null, { id: token, status: 'DEAD' } ) )
+ .catch( err => callback( err, null ) );
},
@@ -686,29 +618,9 @@ module.exports = Trait( 'TokenedService' )
{
callback = callback || function() {};
- this._dao.updateToken(
- quote.getId(),
- this._ns,
- token.id,
- 'ACCEPTED',
- null,
- function( err )
- {
- if ( err )
- {
- callback( err, null );
- return;
- }
-
- callback(
- null,
- {
- id: token,
- status: 'ACCEPTED',
- }
- );
- }
- );
+ this._dao.updateToken( quote.getId(), this._ns, token.id, 'ACCEPTED', null )
+ .then( () => callback( null, { id: token, status: 'ACCEPTED' } ) )
+ .catch( err => callback( err, null ) );
},
@@ -725,31 +637,9 @@ module.exports = Trait( 'TokenedService' )
*/
'virtual protected completeToken': function( quote, token, data, callback )
{
- callback = callback || function() {};
-
- this._dao.updateToken(
- quote.getId(),
- this._ns,
- token.id,
- 'DONE',
- data,
- function( err )
- {
- if ( err )
- {
- callback( err, null );
- return;
- }
-
- callback(
- null,
- {
- id: token,
- status: 'DONE',
- }
- );
- }
- );
+ this._dao.updateToken( quote.getId(), this._ns, token.id, 'DONE', data )
+ .then( () => callback( null, { id: token, status: 'DONE' } ) )
+ .catch( err => callback( err, null ) );
},
} );
diff --git a/src/server/token/MongoTokenDao.ts b/src/server/token/MongoTokenDao.ts
new file mode 100644
index 0000000..ee90d7a
--- /dev/null
+++ b/src/server/token/MongoTokenDao.ts
@@ -0,0 +1,393 @@
+/**
+ * Token state management
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import {
+ TokenDao,
+ TokenData,
+ TokenEntry,
+ TokenNamespaceData,
+ TokenNamespaceResults,
+ TokenQueryResult,
+ TokenStateHistory,
+ TokenStatus,
+} from "./TokenDao";
+
+import { DocumentId } from "../../document/Document";
+import { TokenId, TokenNamespace, TokenState } from "./Token";
+import { UnknownTokenError } from "./UnknownTokenError";
+import { context } from "../../error/ContextError";
+
+
+/**
+ * Manages token updates
+ *
+ * This uses MongoDB as the underlying database.
+ */
+export class MongoTokenDao implements TokenDao
+{
+ /**
+ * Initialize connection
+ *
+ * @param _collection Mongo collection
+ * @param _root_field topmost field in mongo document
+ * @param _getTimestamp UNIX timestamp supplier
+ */
+ constructor(
+ private readonly _collection: MongoCollection,
+ private readonly _root_field: string,
+ private readonly _getTimestamp: () => UnixTimestamp,
+ ) {}
+
+
+ /**
+ * Create or update a token record
+ *
+ * The token entry is entered in the token log, and then the current
+ * entry is updated to reflect the changes. The operation is atomic.
+ *
+ * @param doc_id unique document identifier
+ * @param ns token namespace
+ * @param token_id token identifier (unique to NS)
+ * @param type new token state
+ * @param data token data, if any
+ *
+ * @return token data
+ */
+ updateToken(
+ doc_id: DocumentId,
+ ns: TokenNamespace,
+ token_id: TokenId,
+ type: TokenState,
+ data: string | null,
+ ): Promise<TokenData>
+ {
+ const root = this._genRoot( ns ) + '.';
+
+ const token_entry: TokenStatus = {
+ type: type,
+ timestamp: this._getTimestamp(),
+ data: data,
+ };
+
+ const token_data = {
+ [ root + 'last' ]: token_id,
+ [ root + 'lastState.' + type ]: token_id,
+ [ root + 'lastStatus' ]: token_entry,
+ [ root + token_id + '.status' ]: token_entry,
+ };
+
+ const token_log = {
+ [ root + token_id + '.statusLog' ]: token_entry,
+ };
+
+ return new Promise( ( resolve, reject ) =>
+ {
+ this._collection.findAndModify(
+ { id: +doc_id },
+ [],
+ {
+ $set: token_data,
+ $push: token_log
+ },
+ {
+ upsert: true,
+ new: false,
+ fields: {
+ [ root + 'last' ]: 1,
+ [ root + 'lastState' ]: 1,
+ [ root + 'lastStatus' ]: 1,
+ [ root + token_id + '.status' ]: 1,
+ },
+ },
+
+ ( err: Error|null, prev_data ) =>
+ {
+ if ( err )
+ {
+ reject( err );
+ return;
+ }
+
+ const prev_result = <TokenNamespaceResults>
+ prev_data[ this._root_field ] || {};
+
+ const prev_ns = prev_result[ ns ];
+
+ resolve( {
+ id: token_id,
+ status: token_entry,
+ prev_status: this._getPrevStatus( prev_ns, token_id ),
+ prev_last: this._getPrevLast( prev_ns ),
+ prev_state: this._getPrevState( prev_ns ),
+ } );
+ }
+ );
+ } );
+ }
+
+
+ /**
+ * Determine previous token status, or produce `null`
+ *
+ * @param prev_ns previous namespace data
+ * @param token_id token identifier
+ *
+ * @return previous token status
+ */
+ private _getPrevStatus(
+ prev_ns: TokenNamespaceData | undefined,
+ token_id: TokenId
+ ): TokenStatus | null
+ {
+ if ( prev_ns === undefined )
+ {
+ return null;
+ }
+
+ const entry = <TokenEntry>( prev_ns[ token_id ] );
+
+ return ( entry === undefined )
+ ? null
+ : entry.status;
+ }
+
+
+ /**
+ * Determine previous last updated token for namespace, otherwise `null`
+ *
+ * @param prev_ns previous namespace data
+ *
+ * @return previous last token data
+ */
+ private _getPrevLast(
+ prev_ns: TokenNamespaceData | undefined
+ ): TokenData | null
+ {
+ if ( prev_ns === undefined || ( prev_ns || {} ).last === undefined )
+ {
+ return null;
+ }
+
+ return {
+ id: prev_ns.last,
+ status: prev_ns.lastStatus,
+ prev_status: null,
+ prev_last: null,
+ prev_state: {},
+ };
+ }
+
+
+ /**
+ * Retrieve previous token states
+ *
+ * If token state information is missing, an empty object will be
+ * returned.
+ *
+ * @param prev_ns previous namespace data
+ *
+ * @return previous token states
+ */
+ private _getPrevState(
+ prev_ns: TokenNamespaceData | undefined
+ ): TokenStateHistory
+ {
+ return ( !prev_ns || prev_ns.lastState === undefined )
+ ? {}
+ : prev_ns.lastState;
+ }
+
+
+ /**
+ * Retrieve existing token under the namespace NS, if any, for the doc
+ * identified by DOC_ID
+ *
+ * If a TOKEN_ID is provided, only that token will be queried; otherwise,
+ * the most recently created token will be the subject of the query.
+ *
+ * @param doc_id document identifier
+ * @param ns token namespace
+ * @param token_id token identifier (unique to NS)
+ *
+ * @return token data
+ */
+ getToken( doc_id: DocumentId, ns: TokenNamespace, token_id: TokenId ):
+ Promise<TokenData>
+ {
+ const root = this._genRoot( ns ) + '.';
+ const fields: any = {};
+
+ fields[ root + 'last' ] = 1;
+ fields[ root + 'lastState' ] = 1;
+ fields[ root + 'lastStatus' ] = 1;
+
+ if ( token_id )
+ {
+ // XXX: injectable
+ fields[ root + token_id ] = 1;
+ }
+
+ return new Promise( ( resolve, reject ) =>
+ {
+ this._collection.findOne(
+ { id: +doc_id },
+ { fields: fields },
+ ( err: Error|null, data: TokenQueryResult ) =>
+ {
+ if ( err || !data )
+ {
+ reject( err );
+ return;
+ }
+
+ const field = <TokenNamespaceResults>data[ this._root_field ]
+ || {};
+
+ const ns_data = field[ ns ];
+
+ if ( !ns_data )
+ {
+ reject( context(
+ new UnknownTokenError(
+                                `Unknown token namespace '${ns}' for document '${doc_id}'`
+ ),
+ {
+ doc_id: doc_id,
+ ns: ns,
+ }
+ ) );
+
+ return;
+ }
+
+ resolve( ( token_id )
+ ? this._getRequestedToken( doc_id, ns, token_id, ns_data )
+ : this._getLatestToken( doc_id, ns, ns_data )
+ );
+ }
+ );
+ } );
+ }
+
+
+ /**
+ * Retrieve latest token data
+ *
+ * @param doc_id document id
+ * @param ns token namespace
+ * @param ns_data namespace data
+ *
+ * @return data of latest token in namespace
+ *
+ * @throws UnknownTokenError if last token data is missing
+ */
+ private _getLatestToken(
+ doc_id: DocumentId,
+ ns: TokenNamespace,
+ ns_data: TokenNamespaceData
+ ): TokenData
+ {
+ var last = ns_data.last;
+
+ if ( !last )
+ {
+ throw context(
+ new UnknownTokenError(
+                    `Failed to locate last token for namespace '${ns}' ` +
+ `on document '${doc_id}'`
+ ),
+ {
+ doc_id: doc_id,
+ ns: ns,
+ },
+ );
+ }
+
+ return {
+ id: last,
+ status: ns_data.lastStatus,
+ prev_status: ns_data.lastStatus,
+ prev_last: this._getPrevLast( ns_data ),
+ prev_state: this._getPrevState( ns_data ),
+ };
+ }
+
+
+ /**
+ * Retrieve data of the requested token
+ *
+ * @param doc_id document id
+ * @param ns token namespace
+ * @param token_id token identifier for namespace associated with NS_DATA
+ * @param ns_data namespace data
+ *
+ * @return data of requested token
+ *
+ * @throws UnknownTokenError if token data is missing
+ */
+ private _getRequestedToken(
+ doc_id: DocumentId,
+ ns: TokenNamespace,
+ token_id: TokenId,
+ ns_data: TokenNamespaceData
+ ): TokenData
+ {
+ const reqtok = <TokenEntry>ns_data[ <string>token_id ];
+
+ if ( !reqtok )
+ {
+ throw context(
+ new UnknownTokenError(
+                    `Missing data for requested token '${ns}.${token_id}' ` +
+ `for document '${doc_id}'`
+ ),
+ {
+ doc_id: doc_id,
+ ns: ns,
+ token_id: token_id,
+ },
+ );
+ }
+
+ return {
+ id: token_id,
+ status: reqtok.status,
+ prev_status: reqtok.status,
+ prev_last: this._getPrevLast( ns_data ),
+ prev_state: this._getPrevState( ns_data ),
+ };
+ }
+
+
+ /**
+ * Determine token root for the given namespace
+ *
+ * @param ns token namespace
+ *
+ * @return token root for namespace NS
+ */
+ private _genRoot( ns: TokenNamespace ): string
+ {
+ // XXX: injectable
+ return this._root_field + '.' + ns;
+ }
+};
+
diff --git a/src/server/token/Token.ts b/src/server/token/Token.ts
new file mode 100644
index 0000000..9d4aad0
--- /dev/null
+++ b/src/server/token/Token.ts
@@ -0,0 +1,115 @@
+/**
+ * Token abstraction
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * A token represents some sort of long-running asynchronous process. It
+ * was designed to handle HTTP requests.
+ */
+
+
+/** Identifier unique to token namespace */
+export type TokenId = NominalType<string, 'TokenId'>;
+
+
+/** Token namespace for identifiers */
+export type TokenNamespace = NominalType<string, 'TokenNamespace'>;
+
+
+/**
+ * Token states
+ *
+ * States are listed as strings for ease of {de,}serialization for storage.
+ *
+ * - `ACTIVE` - an outstanding token that has not yet been processed.
+ * - `DONE` - a token has finished processing and result data may be
+ * available.
+ * - `ACCEPTED` - a `DONE` token has been acknowledged by the requester.
+ * - `DEAD` - a token has been killed and should no longer be used.
+ *
+ * For valid state transitions, see `TokenTransition`.
+ */
+export enum TokenState {
+ ACTIVE = "ACTIVE",
+ DONE = "DONE",
+ ACCEPTED = "ACCEPTED",
+ DEAD = "DEAD",
+};
+
+
+/** Tokens that can be killed (placed into a `DEAD` state) */
+export type TokenStateDeadable =
+ TokenState.ACTIVE | TokenState.DONE | TokenState.DEAD;
+
+/** Tokens that can be completed (placed into a `DONE` state) */
+export type TokenStateDoneable = TokenState.ACTIVE;
+
+/** Tokens that can be accepted (placed into an `ACCEPTED` state) */
+export type TokenStateAcceptable = TokenState.DONE;
+
+
+/**
+ * Request token
+ *
+ * Tokens are basic state machines with a unique identifier, timestamp of
+ * the last state transition, and associated string data.
+ */
+export interface Token<T extends TokenState>
+{
+ /** Token identifier */
+ readonly id: TokenId;
+
+ /** Token state */
+ readonly state: T
+
+ /** Timestamp of most recent state transition */
+ readonly timestamp: UnixTimestamp;
+
+ /** Data associated with last state transition */
+ readonly data: string | null;
+
+ /**
+ * Whether this token id differs from the last modified for a given
+ * document within a given namespace during the last database operation
+ *
+ * Whether or not this value is significant is dependent on the
+ * caller. For example, when a new token is created, this value will
+ * always be `true`, because the last updated token couldn't possibly
+ * match a new token id. However, when updating a token, this will only
+ * be `true` if another token in the same namespace for the same
+ * document has been modified since this token was last modified.
+ *
+ * This can be used to determine whether activity on a token should be
+ * ignored. For example, a token that is not the latest may represent a
+ * stale request that should be ignored.
+ *
+ * This value can only be trusted within a context of the most recent
+ * database operation; other processes may have manipulated tokens since
+ * that time.
+ */
+ readonly last_mismatch: boolean;
+
+ /**
+ * Whether this was the most recently created token
+ *
+ * This is true iff the last token to have been in the `ACTIVE` status
+     * shares the same token id.
+ */
+ readonly last_created: boolean;
+}
+
diff --git a/src/server/token/TokenDao.ts b/src/server/token/TokenDao.ts
new file mode 100644
index 0000000..caf9804
--- /dev/null
+++ b/src/server/token/TokenDao.ts
@@ -0,0 +1,204 @@
+/**
+ * Token data access
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * These types are used to describe the structure of the token data as it
+ * is stored in Mongo. It has a number of undesirable properties and
+ * duplicates data---this was intended to make querying easier and work
+ * around Mongo limitations.
+ *
+ * This structure can be changed in the future, but we'll need to maintain
+ * compatibility with the existing data.
+ */
+
+import { TokenId, TokenNamespace, TokenState } from "./Token";
+import { DocumentId } from "../../document/Document";
+
+
+/** Manage token updates */
+export interface TokenDao
+{
+ updateToken(
+ doc_id: DocumentId,
+ ns: TokenNamespace,
+ token_id: TokenId,
+ type: TokenState,
+ data: string | null,
+ ): Promise<TokenData>;
+
+
+ getToken(
+ doc_id: DocumentId,
+ ns: TokenNamespace,
+ token_id: TokenId
+ ): Promise<TokenData>;
+}
+
+
+/**
+ * Result of a Mongo query
+ *
+ * The returned property depends on the actual query.
+ */
+export type TokenQueryResult = { readonly [P: string]: TokenNamespaceResults | undefined };
+
+
+/** Token data for requested namespaces */
+export type TokenNamespaceResults = { readonly [P: string]: TokenNamespaceData | undefined };
+
+
+/** Last token touching various states */
+export type TokenStateHistory = { readonly [P in TokenState]?: TokenId };
+
+
+/**
+ * Token data associated with the given namespace
+ *
+ * This contains duplicate information in order to work around inconvenient
+ * limitations in [earlier] versions of Mongo.
+ */
+export interface TokenNamespaceData
+{
+ /**
+ * Identifier of last token touched in this namespace
+ */
+ readonly last: TokenId,
+
+ /**
+ * Last token id to have touched each state
+ *
+ * A field representing the state will only exist if there is a token
+ * that last touched it.
+ *
+ * This value may not exist on older documents.
+ */
+ readonly lastState?: TokenStateHistory,
+
+ /**
+ * Most recent token status
+ *
+ * This is a duplicate of the last entry in `TokenEntry#statusLog`.
+ */
+ readonly lastStatus: TokenStatus,
+
+ /**
+ * Tokens indexed by identifier
+ *
+ * These data are inconveniently placed---the type definition here is to
+ * accommodate the above fields. Anything using this should cast to
+ * `TokenEntry`.
+ */
+ readonly [P: string]:
+ TokenEntry | TokenStateHistory | TokenStatus | TokenId | undefined,
+}
+
+
+/**
+ * Information about a given token
+ */
+export interface TokenEntry
+{
+ /**
+ * Current token status
+ *
+ * This is a duplicate of the last element of `statusLog`.
+ */
+ readonly status: TokenStatus,
+
+ /**
+ * Log of all past status changes and any associated data
+ *
+ * This is pushed to on each status change. The last element is
+ * duplicated in `status`.
+ */
+ readonly statusLog: TokenStatus[],
+}
+
+
+/**
+ * Status of the token (past or present)
+ */
+export interface TokenStatus
+{
+ /**
+ * State of the token
+ */
+ readonly type: TokenState,
+
+ /**
+ * Unix timestamp representing when the status change occurred
+ */
+ readonly timestamp: UnixTimestamp,
+
+ /**
+ * Arbitrary data associated with the status change
+ *
+ * For example, a token of status `DONE` may be associated with the
+ * fulfillment of a request, in which case this may contain the response
+ * data.
+ */
+ readonly data: string | null,
+}
+
+
+/**
+ * Token information returned from database queries
+ *
+ * This attempts to provide raw data without making assumptions as to how it
+ * may be used. For example, rather than returning whether the token was
+ * the last modified, it returns the last token before the database
+ * operation took place (`prev_last`). Note that this interface is
+ * recursively defined, but will only be a maximum of two levels deep (there
+ * will be no `prev_last.prev_last !== null`).
+ */
+export interface TokenData
+{
+ /** Token identifier */
+ id: TokenId,
+
+ /** Status of token after the database operation */
+ status: TokenStatus,
+
+ /**
+ * Status of token before the database operation
+ *
+ * If the operation is to retrieve a token (rather than to update it),
+ * then this status will be identical to `status`.
+ */
+ prev_status: TokenStatus | null,
+
+ /**
+ * Token data of the last updated token for this document id and
+ * namespace before the last database operation
+ *
+ * This is derived from the value of `TokenNamespaceData.last` and
+ * `TokenNamespaceData.lastStatus` prior to the most recent operation
+ * (e.g. Mongo's `findAndModify` with `new` set to `false`).
+ */
+ prev_last: TokenData | null,
+
+ /**
+ * Last token id to have touched each state
+ *
+ * A field representing the state will only exist if there is a token
+ * that last touched it. If there are no previous states, the result
+ * will be an empty object.
+ */
+ prev_state: { [P in TokenState]?: TokenId },
+}
diff --git a/src/server/token/UnknownTokenError.ts b/src/server/token/UnknownTokenError.ts
new file mode 100644
index 0000000..c6eb671
--- /dev/null
+++ b/src/server/token/UnknownTokenError.ts
@@ -0,0 +1,27 @@
+/**
+ * Unknown token error
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * This still uses ease.js because it does a good job of transparently
+ * creating Error subtypes.
+ */
+
+const { Class } = require( 'easejs' );
+
+export const UnknownTokenError = Class( 'UnknownTokenError' ).extend( Error, {} );
diff --git a/src/server/token/store/PersistentTokenStore.ts b/src/server/token/store/PersistentTokenStore.ts
new file mode 100644
index 0000000..6f4d871
--- /dev/null
+++ b/src/server/token/store/PersistentTokenStore.ts
@@ -0,0 +1,279 @@
+/**
+ * Persistent token management
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import {
+ Token,
+ TokenId,
+ TokenNamespace,
+ TokenState,
+ TokenStateAcceptable,
+ TokenStateDeadable,
+ TokenStateDoneable,
+} from "../Token";
+
+import { TokenStore } from "./TokenStore";
+import { TokenDao, TokenData } from "../TokenDao";
+import { DocumentId } from "../../../document/Document";
+
+
+/**
+ * Persistent token storage
+ *
+ * This store is used to create, read, and modify tokens. Its API is
+ * designed to constrain state transitions at compile-time.
+ *
+ * Stores are initialized with a given document id and namespace, and DAOs
+ * are initialized with a root field. Consequently, a new `TokenStore` must
+ * be created for each group (namespace) of tokens that needs to be operated
+ * on per document.
+ *
+ * A nullary token id generator must be provided. Given that it takes no
+ * arguments, this means that it is nondeterministic. This function must
+ * generate a unique token id at the namespace level or higher.
+ *
+ * The philosophy of this store is that any token within a given namespace
+ * can be updated at any time, but each namespace has a unique "last" token
+ * by document that represents the last token to have been updated within
+ * that context. Also stored is a list of tokens associated with the most
+ * recent transition to each state. When performing any operation on that
+ * namespace, information regarding the last tokens will be provided so that
+ * the caller can determine whether other tokens within that same context
+ * have been modified since a given token was last updated, which may
+ * indicate that a token has been superseded by another.
+ *
+ * As an example, consider the following sequence of events within some
+ * namespace "location" for some document 1000:
+ *
+ * 1. A token `A` is created for a request to a service. `last` is updated
+ * to point to `A`. The last `ACTIVE` token is `A`.
+ *
+ * 2. The user changes information about the location.
+ *
+ * 3. Another token `B` is created to request information for the new
+ * location data. `last` is updated to point to `B`. The last
+ * `ACTIVE` token is `B`.
+ *
+ * 4. The response for token `A` returns and `A` is updated. The last
+ * token in the `DONE` state is `A`.
+ *
+ *   5. The caller for token `A` sees that the last `ACTIVE` token no longer
+ * points to `A` (by observing `last_created`), and so ignores the
+ * reply, understanding that `A` is now stale.
+ *
+ *   6. The response for `B` returns and `B` is updated. The last `DONE`
+ * token is now `B`.
+ *
+ * 7. The caller notices that `last_created` is _not_ set, and so
+ * proceeds to continue processing token `B`. The last token in the
+ * `DONE` state is now `B`.
+ *
+ * For more information on tokens, see `Token`.
+ */
+export class PersistentTokenStore implements TokenStore
+{
+ /**
+ * Initialize store
+ *
+ * @param _dao data access layer
+ * @param _doc_id constrain store to given document id
+ * @param _token_ns token namespace
+ * @param _idgen token id generator
+ */
+ constructor(
+ private readonly _dao: TokenDao,
+ private readonly _doc_id: DocumentId,
+ private readonly _token_ns: TokenNamespace,
+ private readonly _idgen: () => TokenId
+ ) {}
+
+
+ /**
+ * Look up an existing token by id
+ *
+ * This looks up the given token id `token_id` for the document,
+ * constrained to this store's namespace and document id.
+ *
+ * The state of the returned token cannot be determined until runtime,
+ * so the caller is responsible for further constraining the type.
+ *
+ * @param token_id token id
+ *
+ * @return requested token, if it exists
+ */
+ lookupToken( token_id: TokenId ): Promise<Token<TokenState>>
+ {
+ return this._dao.getToken( this._doc_id, this._token_ns, token_id )
+ .then( data => this._tokenDataToToken( data, data.status.type ) );
+ }
+
+
+ /**
+ * Create a new token for the given document within the store's
+ * namespace
+ *
+ * The returned token will always be `ACTIVE` and will always have
+     * `last_mismatch` set.
+ */
+ createToken(): Promise<Token<TokenState.ACTIVE>>
+ {
+ return this._dao.updateToken(
+ this._doc_id, this._token_ns, this._idgen(), TokenState.ACTIVE, null
+ )
+ .then( data => this._tokenDataToToken(
+ data, TokenState.ACTIVE, true
+ ) );
+ }
+
+
+ /**
+ * Convert raw token data to a higher-level `Token`
+ *
+ * The token state must be provided in addition to the token data for
+     * compile-time type checking, where permissible.
+ *
+     * A token will have `last_mismatch` set if the last token before a
+ * database operation does not match `data.id`.
+ *
+ * @param data raw token data
+ * @param state token state
+ *
+ * @return new token
+ */
+ private _tokenDataToToken<T extends TokenState>(
+ data: TokenData,
+ state: T,
+ created: boolean = false
+ ):
+ Token<T>
+ {
+ return {
+ id: data.id,
+ state: state,
+ timestamp: data.status.timestamp,
+ data: data.status.data,
+ last_mismatch: this._isLastMistmatch( data ),
+ last_created: created || this._isLastCreated( data ),
+ };
+ }
+
+
+ /**
+ * Determine whether the given token data represents a mismatch on the
+ * previous last token id
+ *
+     * For more information on what this means, see `Token.last_mismatch`.
+ *
+ * @param data raw token data
+ */
+ private _isLastMistmatch( data: TokenData ): boolean
+ {
+ return ( data.prev_last === null )
+ || ( data.id !== data.prev_last.id );
+ }
+
+
+ /**
+ * Whether the token represents the most recently created token
+ *
+ * @param data raw token data
+ *
+ * @return whether token was the most recently created
+ */
+ private _isLastCreated( data: TokenData ): boolean
+ {
+ return ( data.prev_state !== undefined )
+ && ( data.prev_state[ TokenState.ACTIVE ] === data.id );
+ }
+
+
+ /**
+ * Complete a token
+ *
+ * Completing a token places it into a `DONE` state. Only certain
+ * types of tokens can be completed (`TokenStateDoneable`).
+ *
+     * A token that is in a `DONE` state means that processing has completed
+ * and is waiting acknowledgement from the system responsible for
+ * handling the response.
+ *
+ * @param src token to complete
+ * @param data optional response data
+ *
+ * @return token in `DONE` state
+ */
+ completeToken( src: Token<TokenStateDoneable>, data: string | null ):
+ Promise<Token<TokenState.DONE>>
+ {
+ return this._dao.updateToken(
+ this._doc_id, this._token_ns, src.id, TokenState.DONE, data
+ )
+ .then( data => this._tokenDataToToken( data, TokenState.DONE ) );
+ }
+
+
+ /**
+ * Acknowledge a token as accepted
+ *
+ * Accepting a token places it into an `ACCEPTED` state. Only certain
+ * types of tokens can be accepted (`TokenStateAcceptable`).
+ *
+     * A token that is in an `ACCEPTED` state means that a previously completed
+ * token has been acknowledged and all resources related to the
+ * processing of the token can be freed.
+ *
+ * @param src token to accept
+ * @param data optional accept reason
+ *
+ * @return token in `ACCEPTED` state
+ */
+ acceptToken( src: Token<TokenStateAcceptable>, data: string | null ):
+ Promise<Token<TokenState.ACCEPTED>>
+ {
+ return this._dao.updateToken(
+ this._doc_id, this._token_ns, src.id, TokenState.ACCEPTED, data
+ )
+ .then( data => this._tokenDataToToken( data, TokenState.ACCEPTED ) );
+ }
+
+
+ /**
+ * Kill a token
+ *
+ * Killing a token places it into a `DEAD` state. Only certain types of
+ * tokens can be killed (`TokenStateDeadable`).
+ *
+     * A token that is in a `DEAD` state means that any processing related to
+ * that token should be aborted.
+ *
+ * @param src token to kill
+ * @param data optional kill reason
+ *
+ * @return token in `DEAD` state
+ */
+ killToken( src: Token<TokenStateDeadable>, data: string | null ):
+ Promise<Token<TokenState.DEAD>>
+ {
+ return this._dao.updateToken(
+ this._doc_id, this._token_ns, src.id, TokenState.DEAD, data
+ )
+ .then( data => this._tokenDataToToken( data, TokenState.DEAD ) );
+ }
+}
diff --git a/src/server/token/store/TokenStore.ts b/src/server/token/store/TokenStore.ts
new file mode 100644
index 0000000..3ef9f13
--- /dev/null
+++ b/src/server/token/store/TokenStore.ts
@@ -0,0 +1,120 @@
+/**
+ * Token management
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import {
+ Token,
+ TokenId,
+ TokenState,
+ TokenStateAcceptable,
+ TokenStateDeadable,
+ TokenStateDoneable,
+} from "../Token";
+
+
+/**
+ * Token storage
+ *
+ * This store is used to create, read, and modify tokens. Its API is
+ * designed to constrain state transitions at compile-time.
+ */
+export interface TokenStore
+{
+ /**
+ * Look up an existing token by id
+ *
+ * This looks up the given token id `token_id` for the document,
+ * constrained to this store's namespace and document id.
+ *
+ * The state of the returned token cannot be determined until runtime,
+ * so the caller is responsible for further constraining the type.
+ *
+ * @param token_id token id
+ *
+ * @return requested token, if it exists
+ */
+ lookupToken( token_id: TokenId ): Promise<Token<TokenState>>;
+
+
+ /**
+ * Create a new token for the given document within the store's
+ * namespace
+ *
+ * The returned token will always be `ACTIVE` and will always have
+     * `last_mismatch` set.
+ */
+ createToken(): Promise<Token<TokenState.ACTIVE>>;
+
+
+ /**
+ * Complete a token
+ *
+ * Completing a token places it into a `DONE` state. Only certain
+ * types of tokens can be completed (`TokenStateDoneable`).
+ *
+     * A token that is in a `DONE` state means that processing has completed
+ * and is waiting acknowledgement from the system responsible for
+ * handling the response.
+ *
+ * @param src token to complete
+ * @param data optional response data
+ *
+ * @return token in `DONE` state
+ */
+ completeToken( src: Token<TokenStateDoneable>, data: string | null ):
+ Promise<Token<TokenState.DONE>>;
+
+
+ /**
+ * Acknowledge a token as accepted
+ *
+ * Accepting a token places it into an `ACCEPTED` state. Only certain
+ * types of tokens can be accepted (`TokenStateAcceptable`).
+ *
+     * A token that is in an `ACCEPTED` state means that a previously completed
+ * token has been acknowledged and all resources related to the
+ * processing of the token can be freed.
+ *
+ * @param src token to accept
+ * @param data optional accept reason
+ *
+ * @return token in `ACCEPTED` state
+ */
+ acceptToken( src: Token<TokenStateAcceptable>, data: string | null ):
+ Promise<Token<TokenState.ACCEPTED>>;
+
+
+ /**
+ * Kill a token
+ *
+ * Killing a token places it into a `DEAD` state. Only certain types of
+ * tokens can be killed (`TokenStateDeadable`).
+ *
+     * A token that is in a `DEAD` state means that any processing related to
+ * that token should be aborted.
+ *
+ * @param src token to kill
+ * @param data optional kill reason
+ *
+ * @return token in `DEAD` state
+ */
+ killToken( src: Token<TokenStateDeadable>, data: string | null ):
+ Promise<Token<TokenState.DEAD>>;
+}
diff --git a/src/types/misc.d.ts b/src/types/misc.d.ts
new file mode 100644
index 0000000..83b11b5
--- /dev/null
+++ b/src/types/misc.d.ts
@@ -0,0 +1,56 @@
+/**
+ * Miscellaneous types
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+/**
+ * Define a nominal type
+ *
+ * Nominal types are types that are enforced by name. Typescript implements
+ * structural subtyping (duck typing), which means that two values with the
+ * same structure are considered to be compatible. This opens the
+ * opportunity for certain classes of bugs: if we're expecting a Unix
+ * timestamp, but we're given a user id, it'd be nice if we could catch that
+ * at compile time.
+ *
+ * This uses a method the TS community calls "branding". It is abstracted
+ * behind a generic. See example uses below. I used the name `NominalType`
+ * rather than `Brand` since searching for the former provides much better
+ * literature on the topic, which will hopefully help in debugging when
+ * errors are inevitable encountered.
+ */
+type NominalType<K, T> = K & { __nominal_type__: T };
+
+
+/**
+ * Unix timestamp
+ *
+ * Number of seconds since the Unix epoch (1970-01-01 UTC).
+ */
+type UnixTimestamp = NominalType<number, 'UnixTimestamp'>;
+
+
+/**
+ * Oldschool NodeJS callback
+ *
+ * We should migrate to promises over time. The purpose of this type is to
+ * reduce the boilerplate of these function definitions, and to clearly
+ * document that this pattern is something that used to be done frequently.
+ */
+type NodeCallback<T, R = void> = ( e: Error | null, result: T | null ) => R;
diff --git a/src/types/mongodb.d.ts b/src/types/mongodb.d.ts
new file mode 100644
index 0000000..808b458
--- /dev/null
+++ b/src/types/mongodb.d.ts
@@ -0,0 +1,161 @@
+/**
+ * Type definitions for mongodb library
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * These definitions are for a very old mongodb library, which will be
+ * replaced once we get around to updating node. Quite a failure on the maintenance
+ * front.
+ */
+
+declare module "mongodb";
+
+
+/**
+ * Node-style callback for queries
+ */
+type MongoCallback = ( err: Error|null, data: { [P: string]: any } ) => void;
+
+
+/**
+ * Options for `update` queries
+ *
+ * This is not at all comprehensive; it covers only the fields we actually
+ * make use of.
+ */
+interface MongoQueryUpdateOptions
+{
+ upsert?: boolean,
+}
+
+
+/**
+ * Options for `findOne` queries
+ *
+ * This is not at all comprehensive; it covers only the fields we actually
+ * make use of.
+ */
+interface MongoFindOneOptions
+{
+ fields?: MongoFieldSelector,
+}
+
+
+/**
+ * Options for `findAndModify` queries
+ *
+ * This is not at all comprehensive; it covers only the fields we actually
+ * make use of.
+ */
+interface MongoFindAndModifyOptions
+{
+ /** Whether to return new values instead of previous (default false) */
+ new?: boolean,
+
+ /** Field filter for query result */
+ fields?: MongoFieldSelector,
+
+ /** Whether to create if it does not already exist */
+ upsert?: boolean,
+}
+
+
+/** Mongo query selector */
+type MongoSelector = { [P: string]: any };
+
+
+/** Field selector */
+type MongoFieldSelector = { [P: string]: number };
+
+
+/** Mongo update clause */
+type MongoUpdate = MongoSelector;
+
+
+/** Sorting clause **/
+type MongoSortClause = Array<string | [ string, MongoSortDirection ]>;
+
+
+/** Sort direction */
+type MongoSortDirection = -1 | 1 | 'ascending' | 'descending' | 'asc' | 'desc';
+
+
+/**
+ * An approximation of the MongoCollection interface, as we use it
+ *
+ * The actual interface is a bit more dynamic and complex. Since the
+ * library is going to be updated before this one sees much more use, we'll
+ * hold off on more comprehensive definitions.
+ */
+declare interface MongoCollection
+{
+ /**
+ * Update a document with additional query options
+ *
+ * To simplify the interface, we're always going to require `options`,
+ * even if they are empty. Otherwise typing is a verbose PITA when
+ * writing tests.
+ *
+ * @param selector document query
+ * @param data update data
+ * @param options query options
+ * @param callback continuation on completion
+ *
+ * @return callback return value
+ */
+ update(
+ selector: MongoSelector,
+ data: MongoUpdate,
+ options: MongoQueryUpdateOptions,
+ callback: MongoCallback
+ ): void;
+
+
+ /**
+ * Execute a query and return the first result
+ *
+ * Unlike `update`, the callback return value is not propagated, and so
+ * the callback ought not return anything.
+ *
+ * @param selector document query
+ * @param fields fields to return
+ * @param callback continuation on completion
+ */
+ findOne(
+ selector: MongoSelector,
+ fields: MongoFindOneOptions,
+ callback: MongoCallback
+ ): void;
+
+
+ /**
+ * Execute an update and return query results
+ *
+ * Unless `options.new` is `true`, the results of the query _before_ the
+ * update are returned.
+ *
+ * @param query document query
+ */
+ findAndModify(
+ query: MongoSelector,
+ sort: MongoSortClause,
+ update: MongoUpdate,
+ options: MongoFindAndModifyOptions,
+ callback: MongoCallback,
+ ): void;
+}
diff --git a/src/types/naugty.d.ts b/src/types/naugty.d.ts
new file mode 100644
index 0000000..cdca847
--- /dev/null
+++ b/src/types/naugty.d.ts
@@ -0,0 +1,48 @@
+/**
+ * Things that should only be used when absolutely necessary
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * DEFINITIONS IN THIS PACKAGE DO NAUGHTY THINGS THAT CIRCUMVENT TYPE
+ * SAFETY; THEY SHOULD BE USED ONLY WHEN NECESSARY, AND ONLY WHEN YOU KNOW
+ * WHAT YOU'RE DOING, SINCE THEY MAY INTRODUCE BUGS!
+ *
+ * The prefix `___` is added to each of the names here so that code can be
+ * easily searched for uses of naughty things.
+ *
+ * These types are also exported, unlike some other `.d.ts` files which are
+ * universally available during compilation---this forces the importing of
+ * this file, named `naughty.d.ts`, which should raise some eyebrows and
+ * make people less likely to copy existing code that uses it.
+ */
+
+declare module 'naughty'
+{
+ /**
+ * Make type `T` writable while otherwise maintaining type safety
+ *
+     * __Only use this generic if you are the owner of the object being
+ * manipulated!__
+ *
+ * This should be used when we want types to be readonly, but we need to
+ * be able to modify an existing object to initialize the
+ * properties. This should only be used in situations where it's not
+ * feasible to add those properties when the object is first created.
+ */
+ export type ___Writable<T> = { -readonly [K in keyof T]: T[K] };
+}
diff --git a/test/dapi/AutoRetryTest.js b/test/dapi/AutoRetryTest.js
index 424babe..c6d12b3 100644
--- a/test/dapi/AutoRetryTest.js
+++ b/test/dapi/AutoRetryTest.js
@@ -282,7 +282,7 @@ function _createStub( err, resp )
given: null,
requests: 0,
- 'virtual public request': function( data, callback )
+ 'virtual public request': function( data, callback, id )
{
this.given = data;
this.requests++;
diff --git a/test/dapi/DummyDataApi.js b/test/dapi/DummyDataApi.js
index a33c48b..038b184 100644
--- a/test/dapi/DummyDataApi.js
+++ b/test/dapi/DummyDataApi.js
@@ -59,9 +59,9 @@ module.exports = Class( 'DummyDataApi' )
*
* @return {DataApi} self
*/
- 'virtual public request'( data, callback )
+ 'virtual public request'( data, callback, id )
{
- this._reqCallback( data, callback );
+ this._reqCallback( data, callback, id );
return this;
},
} );
diff --git a/test/dapi/format/JsonResponseTest.js b/test/dapi/format/JsonResponseTest.js
index 970ddeb..7f198c9 100644
--- a/test/dapi/format/JsonResponseTest.js
+++ b/test/dapi/format/JsonResponseTest.js
@@ -142,7 +142,7 @@ function _createStubbedDapi( err, resp )
{
given: null,
- 'virtual public request': function( data, callback )
+ 'virtual public request': function( data, callback, id )
{
this.given = data;
callback( err, resp );
diff --git a/test/error/ChainedErrorTest.ts b/test/error/ChainedErrorTest.ts
new file mode 100644
index 0000000..d6d98ed
--- /dev/null
+++ b/test/error/ChainedErrorTest.ts
@@ -0,0 +1,67 @@
+/**
+ * Tests error chaining
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import * as sut from "../../src/error/ChainedError";
+import { expect } from 'chai';
+
+
+describe( 'ChainedError', () =>
+{
+ it( "can be created with generic error", () =>
+ {
+ const eprev = new Error( "previous error" );
+
+ expect( sut.chain( new Error( "new error" ), eprev ).chain )
+ .to.equal( eprev );
+ } );
+
+
+ it( "can be chained to arbitrary depth", () =>
+ {
+ const e1 = new Error( "lower" );
+ const e2 = sut.chain( new Error( "mid" ), e1 );
+ const e3 = sut.chain( new Error( "outer" ), e2 );
+
+ expect( sut.isChained( e3 ) ).to.be.true;
+ expect( sut.isChained( e2 ) ).to.be.true;
+ expect( sut.isChained( e1 ) ).to.be.false;
+ } );
+
+
+ it( "provides type predicate for TypeScript", () =>
+ {
+ const inner = new Error( "inner" );
+
+ // force to Error to discard ChainedError type
+ const outer: Error = sut.chain( new Error( "outer" ), inner );
+
+ if ( sut.isChained( outer ) )
+ {
+ // if isChained was properly defined, then outer should now
+ // have type ChainedError, and so this should compile
+ expect( outer.chain ).to.equal( inner );
+ }
+ else
+ {
+ expect.fail();
+ }
+ } );
+} );
diff --git a/test/error/ContextErrorTest.ts b/test/error/ContextErrorTest.ts
new file mode 100644
index 0000000..c02c3f2
--- /dev/null
+++ b/test/error/ContextErrorTest.ts
@@ -0,0 +1,68 @@
+/**
+ * Tests error context
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import * as sut from "../../src/error/ContextError";
+import { expect } from 'chai';
+
+
+describe( 'ContextError', () =>
+{
+ it( "can be created with generic error", () =>
+ {
+ const context = { foo: "context" };
+
+ expect( sut.context( new Error( "test error" ), context ).context )
+ .to.equal( context );
+ } );
+
+
+ it( "provides type predicate for TypeScript", () =>
+ {
+ const context = { bar: "baz context" };
+
+ // force to Error to discard ContextError type
+ const e: Error = sut.context( new Error( "test error" ), context );
+
+ if ( sut.hasContext( e ) )
+ {
+            // if hasContext was properly defined, then e should now
+            // have type ContextError, and so this should compile
+ expect( e.context ).to.equal( context );
+ }
+ else
+ {
+ expect.fail();
+ }
+ } );
+
+
+ it( "can create typed contexts", () =>
+ {
+ type FooErrorContext = { foo: string };
+
+ // this is the actual test
+ const e: sut.ContextError<FooErrorContext> =
+ sut.context( new Error( "test error" ), { foo: "context" } );
+
+ // contravariance check (would fail to compile)
+ expect( sut.hasContext( e ) ).to.be.true;
+ } );
+} );
diff --git a/test/event/FieldVisibilityEventHandlerTest.js b/test/event/FieldVisibilityEventHandlerTest.js
index 0283f26..6c86df0 100644
--- a/test/event/FieldVisibilityEventHandlerTest.js
+++ b/test/event/FieldVisibilityEventHandlerTest.js
@@ -141,6 +141,6 @@ function createMockStepUi( expected_name, showf, hidef )
function createStubDataProvider( fail_callback )
{
return {
- clearFailures: fail_callback || () => {},
+ clearFailures: fail_callback || ( () => {} ),
};
}
diff --git a/test/server/dapi/TokenedDataApiTest.ts b/test/server/dapi/TokenedDataApiTest.ts
new file mode 100644
index 0000000..27375fa
--- /dev/null
+++ b/test/server/dapi/TokenedDataApiTest.ts
@@ -0,0 +1,242 @@
+/**
+ * Test DataAPI backed by tokens for logging and precedence
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of liza.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import { TokenedDataApi as Sut } from "../../../src/server/dapi/TokenedDataApi";
+
+import { DataApi, DataApiInput, DataApiResult } from "../../../src/dapi/DataApi";
+import { TokenStore } from "../../../src/server/token/store/TokenStore";
+import {
+ Token,
+ TokenId,
+ TokenNamespace,
+ TokenState,
+ TokenStateDoneable,
+} from "../../../src/server/token/Token";
+import { hasContext } from "../../../src/error/ContextError";
+
+import { expect } from 'chai';
+
+
+describe( 'TokenedDataApi', () =>
+{
+ const expected_ns = 'foo_ns';
+
+
+ ( <[string, boolean, ( e: Error|null ) => void][]>[
+ [
+ "creates token and returns data if last_created",
+ true,
+ e => expect( e ).to.equal( null ),
+ ],
+ [
+ "creates token and does not callback if not last_created",
+ false,
+ e =>
+ {
+ expect( e ).to.be.instanceof( Error );
+
+ // this awkwardness can be mitigated in TS 3.7
+ // (see https://github.com/microsoft/TypeScript/pull/32695)
+ if ( e instanceof Error )
+ {
+ expect( e.message ).to.contain( "superceded" );
+ expect( hasContext( e ) ).to.be.true;
+
+ if ( hasContext( e ) )
+ {
+ expect( e.context.id ).to.equal( expected_ns );
+ }
+ }
+ },
+ ],
+ ] ).forEach( ( [ label, last_created, expected_err ] ) => it( label, done =>
+ {
+ const expected_data = { given: "data" };
+ const dapi_ret_data = [ { return: "data" } ];
+
+ const stub_tok: Token<TokenState.ACTIVE> =
+ createStubToken( last_created );
+
+ let tok_completed = false;
+ let tok_ackd = false;
+
+ const mock_tstore = new class implements TokenStore
+ {
+ lookupToken()
+ {
+ return Promise.reject( Error( "not used" ) );
+ }
+
+ createToken()
+ {
+ return Promise.resolve( stub_tok );
+ }
+
+ completeToken(
+ given_tok: Token<TokenStateDoneable>,
+ given_data: string,
+ )
+ {
+ expect( given_tok ).to.equal( stub_tok );
+ expect( given_data ).to.equal(
+ JSON.stringify( dapi_ret_data )
+ );
+
+ const ret = Object.create( stub_tok );
+ ret.state = TokenState.DONE;
+
+ tok_completed = true;
+
+ return Promise.resolve( ret );
+ }
+
+ acceptToken()
+ {
+ expect( tok_completed ).to.be.true;
+ expect( last_created ).to.be.true;
+
+ tok_ackd = true;
+ return Promise.resolve( Object.create( stub_tok ) );
+ }
+
+ killToken()
+ {
+ expect( tok_completed ).to.be.true;
+ expect( last_created ).to.be.false;
+
+ tok_ackd = true;
+ return Promise.resolve( Object.create( stub_tok ) );
+ }
+ }();
+
+ const mock_dapi = new class implements DataApi
+ {
+ request(
+ given_data: DataApiInput,
+ callback: NodeCallback<DataApiResult>,
+ given_id: string,
+ ): this
+ {
+ expect( given_data ).to.equal( expected_data );
+ expect( given_id ).to.equal( expected_ns );
+
+ callback( null, dapi_ret_data );
+
+ return this;
+ }
+ };
+
+ const ctor = ( ns:TokenNamespace ) =>
+ {
+ expect( ns ).to.equal( expected_ns );
+ return mock_tstore;
+ };
+
+ const callback: NodeCallback<DataApiResult> = ( e, data ) =>
+ {
+ expect( tok_ackd ).to.be.true;
+
+ expected_err( e );
+
+ expect( data ).to.equal(
+ ( last_created ) ? dapi_ret_data : null
+ );
+
+ done();
+ };
+
+ new Sut( mock_dapi, ctor )
+ .request( expected_data, callback, expected_ns );
+ } ) );
+
+
+ it( "propagates dapi request errors", done =>
+ {
+ const expected_err = Error( "test dapi error" );
+
+ const stub_tok: Token<TokenState.ACTIVE> =
+ createStubToken( true );
+
+ const mock_tstore = new class implements TokenStore
+ {
+ lookupToken()
+ {
+ return Promise.reject( Error( "not used" ) );
+ }
+
+ createToken()
+ {
+ return Promise.resolve( stub_tok );
+ }
+
+ completeToken()
+ {
+ return Promise.reject( Error( "not used" ) );
+ }
+
+ acceptToken()
+ {
+ return Promise.reject( Error( "not used" ) );
+ }
+
+ killToken()
+ {
+ return Promise.reject( Error( "not used" ) );
+ }
+ }();
+
+ const mock_dapi = new class implements DataApi
+ {
+ request(
+ _: any,
+ callback: NodeCallback<DataApiResult>,
+ )
+ {
+ callback( expected_err, null );
+ return this;
+ }
+ };
+
+ const callback: NodeCallback<DataApiResult> = ( e, data ) =>
+ {
+ expect( data ).to.equal( null );
+ expect( e ).to.equal( expected_err );
+
+ done();
+ };
+
+ new Sut( mock_dapi, () => mock_tstore )
+ .request( {}, callback, expected_ns );
+ } );
+} );
+
+
+function createStubToken( last_created: boolean ): Token<TokenState.ACTIVE>
+{
+ return {
+ id: <TokenId>'dummy-id',
+ state: TokenState.ACTIVE,
+ timestamp: <UnixTimestamp>0,
+ data: "",
+ last_mismatch: false,
+ last_created: last_created,
+ };
+}
diff --git a/test/server/service/RatingServiceSubmitNotifyTest.js b/test/server/service/RatingServiceSubmitNotifyTest.js
index e7bf4c6..1333eaf 100644
--- a/test/server/service/RatingServiceSubmitNotifyTest.js
+++ b/test/server/service/RatingServiceSubmitNotifyTest.js
@@ -127,7 +127,7 @@ describe( 'RatingServiceSubmitNotify', () =>
// warning: if an expectation fails, because of how
// RatingService handles errors, it will cause the test to
// _hang_ rather than throw the assertion error
- request( data, callback )
+ request( data, callback, id )
{
expect( given_request ).to.equal( request );
expect( data ).to.deep.equal( { quote_id: quote_id } );
diff --git a/test/server/token/MongoTokenDaoTest.ts b/test/server/token/MongoTokenDaoTest.ts
new file mode 100644
index 0000000..167685a
--- /dev/null
+++ b/test/server/token/MongoTokenDaoTest.ts
@@ -0,0 +1,531 @@
+/**
+ * Token state management test
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import {
+ TokenData,
+ TokenQueryResult,
+ TokenStatus,
+} from "../../../src/server/token/TokenDao";
+
+import { MongoTokenDao as Sut } from "../../../src/server/token/MongoTokenDao";
+
+import {
+ TokenId,
+ TokenNamespace,
+ TokenState,
+} from "../../../src/server/token/Token";
+
+import { DocumentId } from "../../../src/document/Document";
+import { UnknownTokenError } from "../../../src/server/token/UnknownTokenError";
+import { hasContext } from "../../../src/error/ContextError";
+
+
+import { expect, use as chai_use } from 'chai';
+chai_use( require( 'chai-as-promised' ) );
+
+
+describe( 'server.token.TokenDao', () =>
+{
+ describe( '#updateToken', () =>
+ {
+ const field = 'foo_field';
+ const did = <DocumentId>12345;
+ const ns = <TokenNamespace>'namespace';
+ const tok_id = <TokenId>'tok123';
+ const tok_type = TokenState.DONE;
+ const data = "some data";
+ const timestamp = <UnixTimestamp>12345;
+
+ const root = field + '.' + ns;
+
+ const last_tok_id = <TokenId>'last-tok';
+
+ const last: TokenStatus = {
+ type: TokenState.DEAD,
+ timestamp: <UnixTimestamp>4567,
+ data: "last token",
+ };
+
+ const prev: TokenStatus = {
+ type: TokenState.ACTIVE,
+ timestamp: <UnixTimestamp>11111,
+ data: "prev status",
+ };
+
+ ( <{ label: string, given: TokenQueryResult, expected: TokenData }[]>[
+ {
+ label: "updates token and returns previous data",
+
+ given: {
+ [field]: {
+ [ns]: {
+ last: last_tok_id,
+ lastState: {
+ [ prev.type ]: last_tok_id,
+ },
+ lastStatus: {
+ type: last.type,
+ timestamp: last.timestamp,
+ data: last.data,
+ },
+ [tok_id]: {
+ status: {
+ type: prev.type,
+ timestamp: prev.timestamp,
+ data: prev.data,
+ },
+ },
+ },
+ },
+ },
+ expected: {
+ id: tok_id,
+ status: {
+ type: tok_type,
+ timestamp: timestamp,
+ data: data,
+ },
+ prev_state: {
+ [ prev.type ]: last_tok_id,
+ },
+ prev_status: prev,
+ prev_last: {
+ id: last_tok_id,
+ status: last,
+ prev_status: null,
+ prev_last: null,
+ prev_state: {},
+ },
+ },
+ },
+
+ {
+ label: "returns null for prev status if missing data",
+
+ given: {
+ [field]: {
+ [ns]: {
+ last: last_tok_id,
+ lastStatus: {
+ type: last.type,
+ timestamp: last.timestamp,
+ data: last.data,
+ },
+ },
+ },
+ },
+ expected: {
+ id: tok_id,
+ status: {
+ type: tok_type,
+ timestamp: timestamp,
+ data: data,
+ },
+ prev_status: null,
+ prev_state: {},
+ prev_last: {
+ id: last_tok_id,
+ status: last,
+ prev_status: null,
+ prev_last: null,
+ prev_state: {},
+ },
+ },
+ },
+
+ {
+ label: "returns null for missing namespace data",
+
+ given: {
+ [field]: {
+ [ns]: {},
+ },
+ },
+ expected: {
+ id: tok_id,
+ status: {
+ type: tok_type,
+ timestamp: timestamp,
+ data: data,
+ },
+ prev_status: null,
+ prev_state: {},
+ prev_last: null,
+ },
+ },
+
+ {
+ label: "returns null for missing namespace",
+
+ given: {
+ [field]: {},
+ },
+ expected: {
+ id: tok_id,
+ status: {
+ type: tok_type,
+ timestamp: timestamp,
+ data: data,
+ },
+ prev_status: null,
+ prev_state: {},
+ prev_last: null,
+ },
+ },
+
+ {
+ label: "returns null for missing root field",
+
+ given: {},
+ expected: {
+ id: tok_id,
+ status: {
+ type: tok_type,
+ timestamp: timestamp,
+ data: data,
+ },
+ prev_status: null,
+ prev_state: {},
+ prev_last: null,
+ },
+ },
+ ] ).forEach( ( { given, expected, label } ) => it( label, () =>
+ {
+ const coll: MongoCollection = {
+ findAndModify( selector, _sort, given_data, options, callback )
+ {
+ const expected_entry: TokenStatus = {
+ type: tok_type,
+ timestamp: timestamp,
+ data: data,
+ };
+
+ expect( selector.id ).to.equal( did );
+
+ expect( given_data ).to.deep.equal( {
+ $set: {
+ [ `${root}.last` ]: tok_id,
+ [ `${root}.lastState.${tok_type}` ]: tok_id,
+ [ `${root}.lastStatus` ]: expected_entry,
+ [ `${root}.${tok_id}.status` ]: expected_entry,
+ },
+ $push: {
+ [ `${root}.${tok_id}.statusLog` ]: expected_entry,
+ },
+ } );
+
+ expect( options ).to.deep.equal( {
+ upsert: true,
+ new: false,
+ fields: {
+ [ `${root}.last` ]: 1,
+ [ `${root}.lastState` ]: 1,
+ [ `${root}.lastStatus` ]: 1,
+ [ `${root}.${tok_id}.status` ]: 1,
+ },
+ } );
+
+ callback( null, given );
+ },
+
+ update() {},
+ findOne() {},
+ };
+
+ return expect(
+ new Sut( coll, field, () => timestamp )
+ .updateToken( did, ns, tok_id, tok_type, data )
+ ).to.eventually.deep.equal( expected );
+ } ) );
+
+
+ it( 'proxies error to callback', () =>
+ {
+ const expected_error = Error( "expected error" );
+
+ const coll: MongoCollection = {
+ findAndModify( _selector, _sort, _update, _options, callback )
+ {
+ callback( expected_error, {} );
+ },
+
+ update() {},
+ findOne() {},
+ };
+
+ return expect(
+ new Sut( coll, 'foo', () => <UnixTimestamp>0 ).updateToken(
+ <DocumentId>0,
+ <TokenNamespace>'ns',
+ <TokenId>'id',
+ TokenState.DONE,
+ null
+ )
+ ).to.eventually.be.rejectedWith( expected_error );
+ } );
+ } );
+
+
+ describe( '#getToken', () =>
+ {
+ const field = 'get_field';
+ const did = <DocumentId>12345;
+ const ns = <TokenNamespace>'get_ns';
+
+ const expected_status: TokenStatus = {
+ type: TokenState.ACTIVE,
+ timestamp: <UnixTimestamp>0,
+ data: "",
+ };
+
+ const last_tok_id = <TokenId>'last-tok';
+
+ const last: TokenStatus = {
+ type: TokenState.DEAD,
+ timestamp: <UnixTimestamp>4567,
+ data: "last token",
+ };
+
+ ( <[string, TokenId, TokenQueryResult, TokenData|null, any, any][]>[
+ [
+ 'retrieves token by id',
+ <TokenId>'tok123',
+ {
+ [field]: {
+ [ns]: {
+ last: last_tok_id,
+ lastState: {
+ [ TokenState.ACTIVE ]: last_tok_id,
+ [ TokenState.DONE ]: last_tok_id,
+ },
+ lastStatus: last,
+
+ tok123: {
+ status: expected_status,
+ statusLog: [ expected_status ],
+ },
+ },
+ },
+ },
+ {
+ id: <TokenId>'tok123',
+ status: expected_status,
+ prev_status: expected_status,
+ prev_state: {
+ [ TokenState.ACTIVE ]: last_tok_id,
+ [ TokenState.DONE ]: last_tok_id,
+ },
+ prev_last: {
+ id: last_tok_id,
+ status: last,
+ prev_status: null,
+ prev_last: null,
+ prev_state: {},
+ }
+ },
+ null,
+ null,
+ ],
+
+ [
+ 'rejects for namespace if token is not found',
+ <TokenId>'tok123',
+ {
+ [field]: {
+ [ns]: {
+ last: last_tok_id,
+ lastStatus: last,
+
+ // just to make sure we don't grab another tok
+ othertok: {
+ status: expected_status,
+ statusLog: [ expected_status ],
+ },
+ },
+ },
+ },
+ null,
+ `${ns}.tok123`,
+ {
+ doc_id: did,
+ ns: ns,
+ token_id: 'tok123',
+ },
+ ],
+
+ [
+ 'rejects if namespace is not found',
+ <TokenId>'tok123',
+ {
+ [field]: {},
+ },
+ null,
+ ns,
+ {
+ doc_id: did,
+ ns: ns,
+ },
+ ],
+
+ [
+ 'returns last modified token given no token id',
+ <TokenId>'',
+ {
+ [field]: {
+ [ns]: {
+ last: last_tok_id,
+ lastState: {
+ [ TokenState.DEAD ]: last_tok_id,
+ },
+ lastStatus: last,
+
+ [ last_tok_id ]: {
+ status: expected_status,
+ statusLog: [ expected_status ],
+ },
+ },
+ },
+ },
+ {
+ id: last_tok_id,
+ status: last,
+ prev_status: last,
+ prev_state: {
+ [ TokenState.DEAD ]: last_tok_id,
+ },
+ prev_last: {
+ id: last_tok_id,
+ status: last,
+ prev_status: null,
+ prev_last: null,
+ prev_state: {},
+ }
+ },
+ null,
+ null,
+ ],
+
+ [
+ 'rejects unknown last modified token given no token id',
+ <TokenId>'',
+ {
+ [field]: {
+ [ns]: {},
+ },
+ },
+ null,
+ ns,
+ {
+ doc_id: did,
+ ns: ns,
+ },
+ ],
+
+ [
+ 'rejects unknown namespace token given no token id',
+ <TokenId>'',
+ {
+ [field]: {},
+ },
+ null,
+ ns,
+ {
+ doc_id: did,
+ ns: ns,
+ },
+ ],
+ ] ).forEach( ( [ label, tok_id, dbresult, expected, fmsg, fcontext ] ) =>
+ it( label, () =>
+ {
+ const coll: MongoCollection = {
+ findOne( selector, { fields }, callback )
+ {
+ const expected_fields = {
+ [ `${field}.${ns}.last` ]: 1,
+ [ `${field}.${ns}.lastState` ]: 1,
+ [ `${field}.${ns}.lastStatus` ]: 1,
+ };
+
+ if ( tok_id )
+ {
+ expected_fields[ `${field}.${ns}.${tok_id}` ] = 1;
+ }
+
+ expect( fields ).to.deep.equal( expected_fields );
+ expect( selector ).to.deep.equal( { id: did } );
+
+ callback( null, dbresult );
+ },
+
+ update() {},
+ findAndModify() {},
+ };
+
+ const result = new Sut( coll, field, () => <UnixTimestamp>0 )
+ .getToken( did, ns, tok_id );
+
+ return ( fmsg !== null )
+ ? Promise.all( [
+ expect( result ).to.eventually.be.rejectedWith(
+ UnknownTokenError, fmsg
+ ),
+ expect( result ).to.eventually.be.rejectedWith(
+ UnknownTokenError, ''+did
+ ),
+ result.catch( e =>
+ {
+ if ( !hasContext( e ) )
+ {
+ // TS will soon have type assertions and
+ // then this conditional and return can be
+ // removed
+ return expect.fail();
+ }
+
+ return expect( e.context ).to.deep.equal( fcontext );
+ } ),
+ ] )
+ : expect( result ).to.eventually.deep.equal( expected );
+ } )
+ );
+
+
+ it( 'proxies error to callback', () =>
+ {
+ const expected_error = Error( "expected error" );
+
+ const coll: MongoCollection = {
+ findOne( _selector, _fields, callback )
+ {
+ callback( expected_error, {} );
+ },
+
+ update() {},
+ findAndModify() {},
+ };
+
+ return expect(
+ new Sut( coll, 'foo', () => <UnixTimestamp>0 )
+ .getToken( <DocumentId>0, <TokenNamespace>'ns', <TokenId>'id' )
+ ).to.eventually.be.rejectedWith( expected_error );
+ } );
+ } );
+} );
diff --git a/test/server/token/store/PersistentTokenStoreTest.ts b/test/server/token/store/PersistentTokenStoreTest.ts
new file mode 100644
index 0000000..ff2409b
--- /dev/null
+++ b/test/server/token/store/PersistentTokenStoreTest.ts
@@ -0,0 +1,441 @@
+/**
+ * Tests persistent token management
+ *
+ * Copyright (C) 2010-2019 R-T Specialty, LLC.
+ *
+ * This file is part of the Liza Data Collection Framework.
+ *
+ * liza is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as
+ * published by the Free Software Foundation, either version 3 of the
+ * License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+import {
+ PersistentTokenStore as Sut
+} from "../../../../src/server/token/store/PersistentTokenStore";
+
+import { TokenDao, TokenData } from "../../../../src/server/token/TokenDao";
+import { DocumentId } from "../../../../src/document/Document";
+
+import {
+ Token,
+ TokenId,
+ TokenNamespace,
+ TokenState,
+} from "../../../../src/server/token/Token";
+
+import { expect, use as chai_use } from 'chai';
+chai_use( require( 'chai-as-promised' ) );
+
+
+describe( 'PersistentTokenStore', () =>
+{
+ // required via the ctor, but this name is just used to denote that it's
+ // not used for a particular test
+ const voidIdgen = () => <TokenId>"00";
+
+
+ describe( '#lookupToken', () =>
+ {
+ const doc_id = <DocumentId>5;
+ const ns = <TokenNamespace>'namespace';
+ const token_id = <TokenId>'token';
+
+ const expected_ts = <UnixTimestamp>12345;
+ const expected_data = "token data";
+
+ ( <[string, TokenData, Token<TokenState>][]>[
+ [
+ "returns existing token with matching last",
+ {
+ id: token_id,
+
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: {
+ id: token_id,
+
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ },
+ },
+ {
+ id: token_id,
+ state: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: false,
+ last_created: false,
+ },
+ ],
+
+ [
+ "returns existing token with mismatched last",
+ {
+ id: token_id,
+
+ status: {
+ type: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: {
+ id: <TokenId>'something-else',
+
+ status: {
+ type: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ },
+ },
+ {
+ id: token_id,
+ state: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: true,
+ last_created: false,
+ },
+ ],
+
+ [
+ "returns existing token with set last created",
+ {
+ id: token_id,
+
+ status: {
+ type: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ prev_state: {
+ [ TokenState.ACTIVE ]: token_id,
+ },
+ },
+ {
+ id: token_id,
+ state: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: true,
+ last_created: true,
+ },
+ ],
+ ] ).forEach( ( [ label, dbdata, expected ] ) => it( label, () =>
+ {
+ const dao = new class implements TokenDao
+ {
+ getToken(
+ given_doc_id: DocumentId,
+ given_ns: TokenNamespace,
+ given_token_id: TokenId
+ )
+ {
+ expect( given_doc_id ).to.equal( doc_id );
+ expect( given_ns ).to.equal( ns );
+ expect( given_token_id ).to.equal( token_id );
+
+ return Promise.resolve( dbdata );
+ }
+
+ updateToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+ }();
+
+ return expect(
+ new Sut( dao, doc_id, ns, voidIdgen )
+ .lookupToken( token_id )
+ )
+ .to.eventually.deep.equal( expected );
+ } ) );
+
+
+ it( "propagates database errors", () =>
+ {
+ const doc_id = <DocumentId>0;
+ const ns = <TokenNamespace>'badns';
+ const token_id = <TokenId>'badtok';
+
+ const expected_e = new Error( "test error" );
+
+ const dao = new class implements TokenDao
+ {
+ getToken()
+ {
+ return Promise.reject( expected_e );
+ }
+
+ updateToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+ }();
+
+ return expect(
+ new Sut( dao, doc_id, ns, voidIdgen )
+ .lookupToken( token_id )
+ ).to.eventually.be.rejectedWith( expected_e );
+ } );
+ } );
+
+
+ describe( '#createToken', () =>
+ {
+ const doc_id = <DocumentId>5;
+ const ns = <TokenNamespace>'namespace';
+ const token_id = <TokenId>'token';
+
+ const expected_ts = <UnixTimestamp>12345;
+ const expected_data = "token data";
+
+ ( <[string, TokenData, Token<TokenState>][]>[
+ [
+ "creates token with last_mismatch given last",
+ {
+ id: token_id,
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+
+ prev_last: {
+ id: <TokenId>'something-else',
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ },
+
+ prev_state: {},
+ },
+ {
+ id: token_id,
+ state: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: true,
+ last_created: true,
+ },
+ ],
+
+ [
+ "creates token with last_mismatch given null last",
+ {
+ id: token_id,
+ status: {
+ type: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ prev_state: {},
+ },
+ {
+ id: token_id,
+ state: TokenState.ACTIVE,
+ timestamp: expected_ts,
+ data: expected_data,
+ last_mismatch: true,
+ last_created: true,
+ },
+ ],
+ ] ).forEach( ( [ label, dbdata, expected ] ) => it( label, () =>
+ {
+ const dao = new class implements TokenDao
+ {
+ getToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+
+ updateToken(
+ given_doc_id: DocumentId,
+ given_ns: TokenNamespace,
+ given_token_id: TokenId,
+ given_type: TokenState,
+ given_data: string | null,
+ )
+ {
+ expect( given_doc_id ).to.equal( doc_id );
+ expect( given_ns ).to.equal( ns );
+ expect( given_token_id ).to.equal( token_id );
+ expect( given_type ).to.equal( TokenState.ACTIVE );
+ expect( given_data ).to.equal( null );
+
+ return Promise.resolve( dbdata );
+ }
+ }();
+
+ return expect(
+ new Sut( dao, doc_id, ns, () => token_id )
+ .createToken()
+ ).to.eventually.deep.equal( expected );
+ } ) );
+ } );
+
+
+ // each of the state changes do the same thing, just behind a
+ // type-restrictive API
+ const expected_ts = <UnixTimestamp>123;
+
+ ( <[keyof Sut, Token<TokenState>, string, Token<TokenState>][]>[
+ [
+ 'completeToken',
+ {
+ id: <TokenId>'complete-test',
+ state: TokenState.ACTIVE,
+ timestamp: <UnixTimestamp>0,
+ data: "",
+ last_mismatch: true,
+ last_created: true,
+ },
+ "complete-data",
+ {
+ id: <TokenId>'complete-test',
+ state: TokenState.DONE,
+ timestamp: expected_ts,
+ data: "complete-data",
+ last_mismatch: true,
+ last_created: true,
+ },
+ ],
+
+ [
+ 'acceptToken',
+ {
+ id: <TokenId>'accept-test',
+ state: TokenState.DONE,
+ timestamp: <UnixTimestamp>0,
+ data: "accept",
+ last_mismatch: true,
+ last_created: true,
+ },
+ "accept-data",
+ {
+ id: <TokenId>'accept-test',
+ state: TokenState.ACCEPTED,
+ timestamp: expected_ts,
+ data: "accept-data",
+ last_mismatch: true,
+ last_created: true,
+ },
+ ],
+
+ [
+ 'killToken',
+ {
+ id: <TokenId>'kill-test',
+ state: TokenState.ACTIVE,
+ timestamp: <UnixTimestamp>0,
+ data: "kill",
+ last_mismatch: true,
+ last_created: true,
+ },
+ "kill-data",
+ {
+ id: <TokenId>'kill-test',
+ state: TokenState.DEAD,
+ timestamp: expected_ts,
+ data: "kill-data",
+ last_mismatch: true,
+ last_created: true,
+ },
+ ],
+ ] ).forEach( ( [ method, token, data, expected ] ) => describe( `#${method}`, () =>
+ {
+ const doc_id = <DocumentId>1234;
+ const ns = <TokenNamespace>'update-ns';
+
+ it( "changes token state", () =>
+ {
+ const dao = new class implements TokenDao
+ {
+ getToken()
+ {
+ return Promise.reject( "unused method" );
+ }
+
+ updateToken(
+ given_doc_id: DocumentId,
+ given_ns: TokenNamespace,
+ given_token_id: TokenId,
+ given_type: TokenState,
+ given_data: string | null,
+ )
+ {
+ expect( given_doc_id ).to.equal( doc_id );
+ expect( given_ns ).to.equal( ns );
+ expect( given_token_id ).to.equal( token.id );
+ expect( given_type ).to.equal( expected.state );
+ expect( given_data ).to.equal( data );
+
+ return Promise.resolve( {
+ id: token.id,
+ status: {
+ // purposefully hard-coded, since this is ignored
+ type: TokenState.ACTIVE,
+
+ timestamp: expected_ts,
+ data: given_data,
+ },
+
+ prev_status: null,
+ prev_last: null,
+ prev_state: {
+ [ TokenState.ACTIVE ]: token.id,
+ },
+ } );
+ }
+ }();
+
+ // this discards some type information for the sake of dynamic
+ // dispatch, so it's not testing the state transition
+ // restrictions that are enforced by the compiler
+ return expect(
+ new Sut( dao, doc_id, ns, voidIdgen )[ method ](
+ <any>token, data
+ )
+ ).to.eventually.deep.equal( expected );
+ } );
+ } ) );
+} );
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..089f208
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,20 @@
+{
+ "compilerOptions": {
+ "incremental": true,
+ "inlineSourceMap": true,
+ "lib": [ "es6" ],
+ "module": "commonjs",
+ "noImplicitReturns": true,
+ "noUnusedLocals": true,
+ "noUnusedParameters": true,
+ "strict": true,
+ "target": "es5",
+ "baseUrl": ".",
+ "paths": {
+ "*": [
+ "node_modules/*",
+ "src/types/*",
+ ],
+ },
+ }
+}