module.exports = function findOrCreate( /* criteria?, newRecord?, explicitCbMaybe?, meta? */ ) {
// Verify `this` refers to an actual Sails/Waterline model.
verifyModelMethodContext(this);
// Set up a few, common local vars for convenience / familiarity.
var WLModel = this;
var orm = this.waterline;
var modelIdentity = this.identity;
// Build an omen for potential use in the asynchronous callback below.
var omen = buildOmen(findOrCreate);
// Build query w/ initial, universal keys.
var query = {
method: 'findOrCreate',
using: modelIdentity
};
// ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗
// ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝
// ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗
// ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║
// ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║
// ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝
//
// The `explicitCbMaybe` callback, if one was provided.
var explicitCbMaybe;
// Handle the various supported usage possibilities
// (locate the `explicitCbMaybe` callback, and extend the `query` dictionary)
//
// > Note that we define `args` to minimize the chance of this "variadics" code
// > introducing any unoptimizable performance problems. For details, see:
// > https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments
// > •=> `.length` is just an integer, so this doesn't leak the `arguments` object itself
// > •=> `i` is always a valid index into the arguments object
var args = new Array(arguments.length);
for (var i = 0; i < args.length; ++i) {
args[i] = arguments[i];
}
// • findOrCreate(criteria, newRecord, explicitCbMaybe, ...)
query.criteria = args[0];
query.newRecord = args[1];
explicitCbMaybe = args[2];
query.meta = args[3];
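// For example (using a hypothetical `Widget` model -- not from the original source),
// the positional mapping above supports invocations like:
//
//   • Widget.findOrCreate({ name: 'doohickey' }, { name: 'doohickey' })
//     (no callback provided -- a Deferred is built and returned below)
//   • Widget.findOrCreate({ name: 'doohickey' }, { name: 'doohickey' }, function(err, record, wasCreated){ /* ... */ })
//   • Widget.findOrCreate({ name: 'doohickey' }, { name: 'doohickey' }, function(err, record, wasCreated){ /* ... */ }, { /* meta */ })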
// ██████╗ ███████╗███████╗███████╗██████╗
// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗
// ██║ ██║█████╗ █████╗ █████╗ ██████╔╝
// ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗
// ██████╔╝███████╗██║ ███████╗██║ ██║
// ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝
//
// ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗
// ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗
// ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║
// ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║
// ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝
// ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝
//
// ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐
// ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││
// └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘
// ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐
// │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│
// └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘
// If an explicit callback function was specified, then immediately run the logic below
// and trigger the explicit callback when the time comes. Otherwise, build and return
// a new Deferred now. (If/when the Deferred is executed, the logic below will run.)
return parley(
function (done){
// Otherwise, IWMIH, we know that it's time to actually do some stuff.
// So...
//
// ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗
// ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝
// █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗
// ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝
// ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗
// ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴
//
// Forge a stage 2 query (aka logical protostatement)
try {
forgeStageTwoQuery(query, orm);
} catch (e) {
switch (e.code) {
case 'E_INVALID_CRITERIA':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'Invalid criteria.\n' +
'Details:\n' +
' ' + e.details + '\n'
}, omen)
);
case 'E_INVALID_NEW_RECORDS':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'Invalid new record(s).\n'+
'Details:\n'+
' '+e.details+'\n'
}, omen)
);
case 'E_NOOP':
// If the criteria is deemed to be a no-op, then normalize it into a standard format.
// This way, it will continue to represent a no-op as we proceed below, so the `findOne()`
// call will also come back with an E_NOOP, and so then it will go on to do a `.create()`.
// And most importantly, this way we don't have to worry about the case where the no-op
// was caused by an edge case like `false` (we need to be able to munge the criteria --
// i.e. deleting the `limit`).
var STD_NOOP_CRITERIA = { where: { or: [] } };
query.criteria = STD_NOOP_CRITERIA;
break;
default:
return done(e);
}
}// >-•
// Remove the `limit`, `skip`, and `sort` clauses so that our findOne query is valid.
// (This is because they were automatically attached above.)
delete query.criteria.limit;
delete query.criteria.skip;
delete query.criteria.sort;
// ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬┌┐┌┌┬┐ ┌─┐┌┐┌┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ ├┤ ││││ ││ │ ││││├┤ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └ ┴┘└┘─┴┘ └─┘┘└┘└─┘ └─┘└└─┘└─┘┴└─ ┴
// Note that we pass in `meta` here, which ensures we're on the same db connection.
// (provided one was explicitly passed in!)
WLModel.findOne(query.criteria, function _afterPotentiallyFinding(err, foundRecord) {
if (err) {
return done(err);
}
// Note that we pass through a flag as the third argument to our callback,
// indicating whether a new record was created.
if (foundRecord) {
return done(undefined, foundRecord, false);
}
// So that the create query is valid, check if the primary key value was
// automatically set to `null` by FS2Q (i.e. because it was unspecified.)
// And if so, remove it.
//
// > IWMIH, we know this was automatic because, if `null` had been
// > specified explicitly, it would have already caused an error in
// > our call to FS2Q above (`null` is NEVER a valid PK value)
var pkAttrName = WLModel.primaryKey;
var wasPKValueCoercedToNull = _.isNull(query.newRecord[pkAttrName]);
if (wasPKValueCoercedToNull) {
delete query.newRecord[pkAttrName];
}
// Build a modified shallow clone of the originally-provided `meta` from
// userland, but that also has `fetch: true` and the private/experimental
// flag, `skipEncryption: true`. For context on the bit about encryption,
// see: https://github.com/balderdashy/sails/issues/4302#issuecomment-363883885
// > PLEASE DO NOT RELY ON `skipEncryption` IN YOUR OWN CODE- IT COULD CHANGE
// > AT ANY TIME AND BREAK YOUR APP OR PLUGIN!
var modifiedMetaForCreate = _.extend({}, query.meta || {}, {
fetch: true,
skipEncryption: true
});
// ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ │ ├┬┘├┤ ├─┤ │ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘└└─┘└─┘┴└─ ┴
WLModel.create(query.newRecord, function _afterCreating(err, createdRecord) {
if (err) {
return done(err);
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// FUTURE: Instead of preventing projections (`omit`/`select`) for findOrCreate,
// instead allow them and just modify the newly created record after the fact
// (i.e. trim properties in-memory).
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Pass the newly-created record to our callback.
// > Note we set the `wasCreated` flag to `true` in this case.
return done(undefined, createdRecord, true);
}, modifiedMetaForCreate);//</.create()>
}, query.meta);//</.findOne()>
},
explicitCbMaybe,
_.extend(DEFERRED_METHODS, {
// Provide access to this model for use in query modifier methods.
_WLModel: WLModel,
// Set up initial query metadata.
_wlQueryInfo: query,
})
);//</parley>
};

findOrCreate()
Find the record matching the specified criteria. If no such record exists, create it
using the specified new-record values. (The `wasCreated` flag passed to the callback
indicates which of the two actually happened.)
```
// Ensure a pet type with type 'dog' exists
PetType.findOrCreate({ type: 'dog' }, { name: 'Pretend pet type', type: 'dog' })
.exec(function(err, petType, wasCreated) {
// ...
});
```
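The same thing can be done without the deferred object by passing an explicit callback as the
third argument (see "Usage without deferred object" below). A minimal sketch, reusing the
`PetType` example above:
```
PetType.findOrCreate({ type: 'dog' }, { name: 'Pretend pet type', type: 'dog' }, function(err, petType, wasCreated) {
  if (err) { return console.error(err); }
  if (wasCreated) {
    // No matching record existed, so one was just created.
  }
  else {
    // An existing matching record was found (nothing was created).
  }
});
```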
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Usage without deferred object:
================================================
@param {Dictionary?} criteria
@param {Dictionary} newRecord
@param {Function?} explicitCbMaybe
Callback function to run when query has either finished successfully or errored.
(If unspecified, will return a Deferred object instead of actually doing anything.)
@param {Ref?} meta
For internal use.
@returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
The underlying query keys:
==============================
@qkey {Dictionary?} criteria
@qkey {Dictionary?} newRecord
@qkey {Dictionary?} meta
@qkey {String} using
@qkey {String} method
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Function: findOrCreate()  (JavaScript)
Repo: balderdashy/waterline
Path: lib/waterline/methods/find-or-create.js
License: MIT
Source: https://github.com/balderdashy/waterline/blob/master/lib/waterline/methods/find-or-create.js

module.exports = function update(criteria, valuesToSet, explicitCbMaybe, metaContainer) {
// Verify `this` refers to an actual Sails/Waterline model.
verifyModelMethodContext(this);
// Set up a few, common local vars for convenience / familiarity.
var WLModel = this;
var orm = this.waterline;
var modelIdentity = this.identity;
// Build an omen for potential use in the asynchronous callback below.
var omen = buildOmen(update);
// Build initial query.
var query = {
method: 'update',
using: modelIdentity,
criteria: criteria,
valuesToSet: valuesToSet,
meta: metaContainer
};
// ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗
// ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝
// ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗
// ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║
// ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║
// ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝
//
// N/A
// (there are no out-of-order, optional arguments)
// ██████╗ ███████╗███████╗███████╗██████╗
// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗
// ██║ ██║█████╗ █████╗ █████╗ ██████╔╝
// ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗
// ██████╔╝███████╗██║ ███████╗██║ ██║
// ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝
//
// ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗
// ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗
// ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║
// ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║
// ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝
// ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝
//
// ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐
// ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││
// └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘
// ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐
// │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│
// └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘
// If a callback function was not specified, then build a new Deferred and bail now.
//
// > This method will be called AGAIN automatically when the Deferred is executed.
// > and next time, it'll have a callback.
return parley(
function (done){
// Otherwise, IWMIH, we know that a callback was specified.
// So...
// ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗
// ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝
// █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗
// ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝
// ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗
// ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴
//
// Forge a stage 2 query (aka logical protostatement)
// This ensures a normalized format.
try {
forgeStageTwoQuery(query, orm);
} catch (e) {
switch (e.code) {
case 'E_INVALID_CRITERIA':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'Invalid criteria.\n'+
'Details:\n'+
' '+e.details+'\n'
}, omen)
);
case 'E_INVALID_VALUES_TO_SET':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'Cannot perform update with the provided values.\n'+
'Details:\n'+
' '+e.details+'\n'
}, omen)
);
case 'E_NOOP':
// Determine the appropriate no-op result.
// If `fetch` meta key is set, use `[]`-- otherwise use `undefined`.
//
// > Note that future versions might simulate output from the raw driver.
// > (e.g. `{ numRecordsUpdated: 0 }`)
// > See: https://github.com/treelinehq/waterline-query-docs/blob/master/docs/results.md#update
var noopResult = undefined;
if (query.meta && query.meta.fetch) {
noopResult = [];
}//>-
return done(undefined, noopResult);
default:
return done(e);
}
}
// ╦ ╦╔═╗╔╗╔╔╦╗╦ ╔═╗ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─
// ╠═╣╠═╣║║║ ║║║ ║╣ BEFORE │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐
// ╩ ╩╩ ╩╝╚╝═╩╝╩═╝╚═╝ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴
// Run the "before" lifecycle callback, if appropriate.
(function(proceed) {
// If the `skipAllLifecycleCallbacks` meta flag was set, don't run any of
// the methods.
if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) {
return proceed(undefined, query);
}
if (!_.has(WLModel._callbacks, 'beforeUpdate')) {
return proceed(undefined, query);
}
WLModel._callbacks.beforeUpdate(query.valuesToSet, function(err){
if (err) { return proceed(err); }
return proceed(undefined, query);
});
})(function(err, query) {
if (err) {
return done(err);
}
// ================================================================================
// FUTURE: potentially bring this back (but also would need the `omit clause`)
// ================================================================================
// // Before we get to forging again, save a copy of the stage 2 query's
// // `select` clause. We'll need this later on when processing the resulting
// // records, and if we don't copy it now, it might be damaged by the forging.
// //
// // > Note that we don't need a deep clone.
// // > (That's because the `select` clause is only 1 level deep.)
// var s2QSelectClause = _.clone(query.criteria.select);
// ================================================================================
// ╔═╗╦ ╦╔═╗╔═╗╦╔═ ┌─┐┌─┐┬─┐ ┌─┐┌┐┌┬ ┬
// ║ ╠═╣║╣ ║ ╠╩╗ ├┤ │ │├┬┘ ├─┤│││└┬┘
// ╚═╝╩ ╩╚═╝╚═╝╩ ╩ └ └─┘┴└─ ┴ ┴┘└┘ ┴
// ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ ┬─┐┌─┐┌─┐┌─┐┌┬┐┌─┐
// │ │ ││ │ ├┤ │ │ ││ ││││ ├┬┘├┤ └─┐├┤ │ └─┐
// └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ ┴└─└─┘└─┘└─┘ ┴ └─┘
// Also removes them from the valuesToSet before sending to the adapter.
var collectionResets = {};
_.each(WLModel.attributes, function _eachKnownAttrDef(attrDef, attrName) {
if (attrDef.collection) {
// Only track a reset if a value was explicitly specified for this collection assoc.
// (All we have to do is just check for truthiness, since we've already done FS2Q at this point)
if (query.valuesToSet[attrName]) {
collectionResets[attrName] = query.valuesToSet[attrName];
// Remove the collection value from the valuesToSet because the adapter
// doesn't need to do anything during the initial update.
delete query.valuesToSet[attrName];
}
}
});//</ each known attribute def >
// Hold a variable for the query's `meta` property, which could possibly be
// changed by us later on.
var modifiedMetaForCollectionResets;
// If any collection resets were specified, force `fetch: true` (meta key)
// so that we can use it below.
if (_.keys(collectionResets).length > 0) {
// Build a modified shallow clone of the originally-provided `meta`
// that also has `fetch: true`.
modifiedMetaForCollectionResets = _.extend({}, query.meta || {}, { fetch: true });
}//>-
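// For example (hypothetical model/attribute names -- not from the original source), a call like
//   `User.update({ id: 3 }).set({ pets: [99, 98] }).exec(...)`
// has its `pets` key stripped out of `valuesToSet` above, gets `fetch: true` forced via the
// modified meta here, and then (further below) triggers a call like
// `User.replaceCollection([3], 'pets', [99, 98], ...)` for each record that was actually updated.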
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴
// Now, destructively forge this S2Q into a S3Q.
try {
query = forgeStageThreeQuery({
stageTwoQuery: query,
identity: modelIdentity,
transformer: WLModel._transformer,
originalModels: orm.collections
});
} catch (e) { return done(e); }
// ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗
// └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝
// └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═
// Grab the appropriate adapter method and call it.
var adapter = WLModel._adapter;
if (!adapter.update) {
return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.'));
}
// Allow the query to possibly use the modified meta
if (modifiedMetaForCollectionResets) {
query.meta = modifiedMetaForCollectionResets;
}
adapter.update(WLModel.datastore, query, function _afterTalkingToAdapter(err, rawAdapterResult) {
if (err) {
err = forgeAdapterError(err, omen, 'update', modelIdentity, orm);
return done(err);
}//-•
// ╔═╗╔╦╗╔═╗╔═╗ ╔╗╔╔═╗╦ ╦ ┬ ┬┌┐┌┬ ┌─┐┌─┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬
// ╚═╗ ║ ║ ║╠═╝ ║║║║ ║║║║ │ │││││ ├┤ └─┐└─┐ ╠╣ ║╣ ║ ║ ╠═╣ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘
// ╚═╝ ╩ ╚═╝╩ ╝╚╝╚═╝╚╩╝ooo └─┘┘└┘┴─┘└─┘└─┘└─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴
// ┬ ┬┌─┐┌─┐ ┌─┐┌─┐┌┬┐ ┌┬┐┌─┐ ┌┬┐┬─┐┬ ┬┌─┐
// │││├─┤└─┐ └─┐├┤ │ │ │ │ │ ├┬┘│ │├┤
// └┴┘┴ ┴└─┘ └─┘└─┘ ┴ ┴ └─┘ ┴ ┴└─└─┘└─┘
var fetch = modifiedMetaForCollectionResets || (_.has(query.meta, 'fetch') && query.meta.fetch);
// If `fetch` was not enabled, return.
if (!fetch) {
// > Note: This `if` statement is a convenience, for cases where the result from
// > the adapter may have been coerced from `undefined` to `null` automatically.
// > (we want it to be `undefined` still, for consistency)
if (_.isNull(rawAdapterResult)) {
return done();
}//-•
if (!_.isUndefined(rawAdapterResult)) {
console.warn('\n'+
'Warning: Unexpected behavior in database adapter:\n'+
'Since `fetch` is NOT enabled, this adapter (for datastore `'+WLModel.datastore+'`)\n'+
'should NOT have sent back anything as the 2nd argument when triggering the callback\n'+
'from its `update` method. But it did -- which is why this warning is being displayed:\n'+
'to help avoid confusion and draw attention to the bug. Specifically, got:\n'+
util.inspect(rawAdapterResult, {depth:5})+'\n'+
'(Ignoring it and proceeding anyway...)'+'\n'
);
}//>-
return done();
}//-•
// IWMIH then we know that `fetch: true` meta key was set, and so the
// adapter should have sent back an array.
// Verify that the raw result from the adapter is an array.
if (!_.isArray(rawAdapterResult)) {
return done(new Error(
'Unexpected behavior in database adapter: Since `fetch: true` was enabled, this adapter '+
'(for datastore `'+WLModel.datastore+'`) should have sent back an array of records as the '+
'2nd argument when triggering the callback from its `update` method. But instead, got: '+
util.inspect(rawAdapterResult, {depth:5})+''
));
}//-•
// Unserialize each record
var transformedRecords;
try {
// Attempt to convert the column names in each record back into attribute names.
transformedRecords = rawAdapterResult.map(function(record) {
return WLModel._transformer.unserialize(record);
});
} catch (e) { return done(e); }
// Check the records to verify compliance with the adapter spec,
// as well as any issues related to stale data that might not have been
// been migrated to keep up with the logical schema (`type`, etc. in
// attribute definitions).
try {
processAllRecords(transformedRecords, query.meta, modelIdentity, orm);
} catch (e) { return done(e); }
// ┌─┐┌─┐┬ ┬ ╦═╗╔═╗╔═╗╦ ╔═╗╔═╗╔═╗ ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗╦╔═╗╔╗╔ ┌─┐┌─┐┬─┐
// │ ├─┤│ │ ╠╦╝║╣ ╠═╝║ ╠═╣║ ║╣ ║ ║ ║║ ║ ║╣ ║ ║ ║║ ║║║║ ├┤ │ │├┬┘
// └─┘┴ ┴┴─┘┴─┘ ╩╚═╚═╝╩ ╩═╝╩ ╩╚═╝╚═╝ ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ ╩╚═╝╝╚╝ └ └─┘┴└─
// ┌─┐─┐ ┬┌─┐┬ ┬┌─┐┬┌┬┐┬ ┬ ┬ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐┌┬┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
// ├┤ ┌┴┬┘├─┘│ ││ │ │ │ └┬┘───└─┐├─┘├┤ │ │├┤ │├┤ ││ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐
// └─┘┴ └─┴ ┴─┘┴└─┘┴ ┴ ┴─┘┴ └─┘┴ └─┘└─┘┴└ ┴└─┘─┴┘ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘
var targetIds = _.pluck(transformedRecords, WLModel.primaryKey);
async.each(_.keys(collectionResets), function _eachReplaceCollectionOp(collectionAttrName, next) {
WLModel.replaceCollection(targetIds, collectionAttrName, collectionResets[collectionAttrName], function(err){
if (err) { return next(err); }
return next();
}, query.meta);
},// ~∞%°
function _afterReplacingAllCollections(err) {
if (err) { return done(err); }
// ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┬ ┬┌─┐┌┬┐┌─┐┌┬┐┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─
// ╠═╣╠╣ ║ ║╣ ╠╦╝ │ │├─┘ ││├─┤ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐
// ╩ ╩╚ ╩ ╚═╝╩╚═ └─┘┴ ─┴┘┴ ┴ ┴ └─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴
// Run "after" lifecycle callback AGAIN and AGAIN- once for each record.
// ============================================================
// FUTURE: look into this
// (we probably shouldn't call this again and again--
// plus what if `fetch` is not in use and you want to use an LC?
// Then again- the right answer isn't immediately clear. And it
// probably not worth breaking compatibility until we have a much
// better solution)
// ============================================================
async.each(transformedRecords, function _eachRecord(record, next) {
// If the `skipAllLifecycleCallbacks` meta flag was set, don't run any of
// the methods.
if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) {
return next();
}
// Skip "after" lifecycle callback, if not defined.
if (!_.has(WLModel._callbacks, 'afterUpdate')) {
return next();
}
// Otherwise run it.
WLModel._callbacks.afterUpdate(record, function _afterMaybeRunningAfterUpdateForThisRecord(err) {
if (err) {
return next(err);
}
return next();
});
},// ~∞%°
function _afterIteratingOverRecords(err) {
if (err) {
return done(err);
}
return done(undefined, transformedRecords);
});//</ async.each() -- ran "after" lifecycle callback on each record >
});//</ async.each() (calling replaceCollection() for each explicitly-specified plural association) >
});//</ adapter.update() >
});//</ "before" lifecycle callback >
},
explicitCbMaybe,
_.extend(DEFERRED_METHODS, {
// Provide access to this model for use in query modifier methods.
_WLModel: WLModel,
// Set up initial query metadata.
_wlQueryInfo: query,
})
);//</parley>
};

update()
Update records that match the specified criteria, patching them with
the provided values.
```
// Forgive all debts: Zero out bank accounts with less than $0 in them.
BankAccount.update().where({
balance: { '<': 0 }
}).set({
balance: 0
}).exec(function(err) {
// ...
});
```
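Since adapters only send records back when the `fetch` meta key is enabled (see the result
handling in the code above), retrieving the updated records might look like the following
sketch, assuming the standard `.fetch()` query modifier (which just sets `fetch: true`):
```
BankAccount.update({ balance: { '<': 0 } })
.set({ balance: 0 })
.fetch()
.exec(function(err, updatedAccounts) {
  if (err) { /* handle the error */ return; }
  // `updatedAccounts` is an array of the records that were patched.
});
```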
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Usage without deferred object:
================================================
@param {Dictionary} criteria
@param {Dictionary} valuesToSet
@param {Function?} explicitCbMaybe
Callback function to run when query has either finished successfully or errored.
(If unspecified, will return a Deferred object instead of actually doing anything.)
@param {Ref?} meta
For internal use.
@returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
The underlying query keys:
==============================
@qkey {Dictionary?} criteria
@qkey {Dictionary?} valuesToSet
@qkey {Dictionary?} meta
@qkey {String} using
@qkey {String} method
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Function: update(criteria, valuesToSet, explicitCbMaybe, metaContainer)  (JavaScript)
Repo: balderdashy/waterline
Path: lib/waterline/methods/update.js
License: MIT
Source: https://github.com/balderdashy/waterline/blob/master/lib/waterline/methods/update.js

module.exports = function createEach( /* newRecords?, explicitCbMaybe?, meta? */ ) {
// Verify `this` refers to an actual Sails/Waterline model.
verifyModelMethodContext(this);
// Set up a few, common local vars for convenience / familiarity.
var WLModel = this;
var orm = this.waterline;
var modelIdentity = this.identity;
// Build an omen for potential use in the asynchronous callback below.
var omen = buildOmen(createEach);
// Build query w/ initial, universal keys.
var query = {
method: 'createEach',
using: modelIdentity
};
// ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗
// ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝
// ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗
// ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║
// ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║
// ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝
//
// The `explicitCbMaybe` callback, if one was provided.
var explicitCbMaybe;
// Handle the various supported usage possibilities
// (locate the `explicitCbMaybe` callback)
//
// > Note that we define `args` so that we can insulate access
// > to the arguments provided to this function.
var args = arguments;
(function _handleVariadicUsage(){
// The metadata container, if one was provided.
var _meta;
// First argument always means one thing: the array of new records.
//
// • createEach(newRecords, ...)
// • createEach(..., explicitCbMaybe, _meta)
query.newRecords = args[0];
explicitCbMaybe = args[1];
_meta = args[2];
// Fold in `_meta`, if relevant.
if (!_.isUndefined(_meta)) {
query.meta = _meta;
} // >-
})();
// ██████╗ ███████╗███████╗███████╗██████╗
// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗
// ██║ ██║█████╗ █████╗ █████╗ ██████╔╝
// ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗
// ██████╔╝███████╗██║ ███████╗██║ ██║
// ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝
//
// ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗
// ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗
// ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║
// ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║
// ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝
// ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝
//
// ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐
// ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││
// └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘
// ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐
// │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│
// └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘
// If a callback function was not specified, then build a new Deferred and bail now.
//
// > This method will be called AGAIN automatically when the Deferred is executed.
// > and next time, it'll have a callback.
return parley(
function (done){
// Otherwise, IWMIH, we know that a callback was specified.
// So...
//
// ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗
// ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝
// █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗
// ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝
// ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗
// ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴
//
// Forge a stage 2 query (aka logical protostatement)
try {
forgeStageTwoQuery(query, orm);
} catch (e) {
switch (e.code) {
case 'E_INVALID_NEW_RECORDS':
case 'E_INVALID_META':
return done(
flaverr({
name: 'UsageError',
code: e.code,
message: e.message,
details: e.details,
}, omen)
);
// ^ when the standard usage error message is good enough as-is, without any further customization
case 'E_NOOP':
// Determine the appropriate no-op result.
// If `fetch` meta key is set, use `[]`-- otherwise use `undefined`.
var noopResult = undefined;
if (query.meta && query.meta.fetch) {
noopResult = [];
}//>-
return done(undefined, noopResult);
default:
return done(e);
// ^ when an internal, miscellaneous, or unexpected error occurs
}
} // >-•
// console.log('Successfully forged s2q ::', require('util').inspect(query, {depth:null}));
// ╔╗ ╔═╗╔═╗╔═╗╦═╗╔═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─
// ╠╩╗║╣ ╠╣ ║ ║╠╦╝║╣ │ ├┬┘├┤ ├─┤ │ ├┤ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐
// ╚═╝╚═╝╚ ╚═╝╩╚═╚═╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴
// Determine what to do about running "before" lifecycle callbacks
(function _maybeRunBeforeLC(proceed){
// If the `skipAllLifecycleCallbacks` meta key was enabled, then don't run this LC.
if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) {
return proceed(undefined, query);
}//-•
// If there is no relevant "before" lifecycle callback, then just proceed.
if (!_.has(WLModel._callbacks, 'beforeCreate')) {
return proceed(undefined, query);
}//-•
// IWMIH, run the "before" lifecycle callback on each new record.
async.each(query.newRecords, WLModel._callbacks.beforeCreate, function(err) {
if (err) { return proceed(err); }
return proceed(undefined, query);
});
})(function _afterPotentiallyRunningBeforeLC(err, query) {
if (err) {
return done(err);
}
// ╔═╗╦ ╦╔═╗╔═╗╦╔═ ┌─┐┌─┐┬─┐ ┌─┐┌┐┌┬ ┬
// ║ ╠═╣║╣ ║ ╠╩╗ ├┤ │ │├┬┘ ├─┤│││└┬┘
// ╚═╝╩ ╩╚═╝╚═╝╩ ╩ └ └─┘┴└─ ┴ ┴┘└┘ ┴
// ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ ┬─┐┌─┐┌─┐┌─┐┌┬┐┌─┐
// │ │ ││ │ ├┤ │ │ ││ ││││ ├┬┘├┤ └─┐├┤ │ └─┐
// └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ ┴└─└─┘└─┘└─┘ ┴ └─┘
// Also removes them from the newRecords before sending to the adapter.
var allCollectionResets = [];
_.each(query.newRecords, function _eachRecord(record) {
// Hold the individual resets
var reset = {};
_.each(WLModel.attributes, function _eachKnownAttrDef(attrDef, attrName) {
if (attrDef.collection) {
// Only create a reset if the value isn't an empty array. If the value
// is an empty array there isn't any resetting to do.
if (record[attrName].length) {
reset[attrName] = record[attrName];
}
// Remove the collection value from the newRecord because the adapter
// doesn't need to do anything during the initial create.
delete record[attrName];
}
});//</ each known attr def >
allCollectionResets.push(reset);
});//</ each record >
// Hold a variable for the query's `meta` property, which could possibly be
// changed by us later on.
var modifiedMeta;
// If any collection resets were specified, force `fetch: true` (meta key)
// so that the adapter will send back the records and we can use them below
// in order to call `resetCollection()`.
var anyActualCollectionResets = _.any(allCollectionResets, function (reset){
return _.keys(reset).length > 0;
});
if (anyActualCollectionResets) {
// Build a modified shallow clone of the originally-provided `meta`
// that also has `fetch: true`.
modifiedMeta = _.extend({}, query.meta || {}, { fetch: true });
}//>-
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴
// Now, destructively forge this S2Q into a S3Q.
try {
query = forgeStageThreeQuery({
stageTwoQuery: query,
identity: modelIdentity,
transformer: WLModel._transformer,
originalModels: orm.collections
});
} catch (e) { return done(e); }
// ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗
// └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝
// └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═
// Grab the appropriate adapter method and call it.
var adapter = WLModel._adapter;
if (!adapter.createEach) {
return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.'));
}
// Allow the query to possibly use the modified meta
query.meta = modifiedMeta || query.meta;
// console.log('Successfully forged S3Q ::', require('util').inspect(query, {depth:null}));
adapter.createEach(WLModel.datastore, query, function(err, rawAdapterResult) {
if (err) {
err = forgeAdapterError(err, omen, 'createEach', modelIdentity, orm);
return done(err);
}//-•
// ╔═╗╔╦╗╔═╗╔═╗ ╔╗╔╔═╗╦ ╦ ┬ ┬┌┐┌┬ ┌─┐┌─┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬
// ╚═╗ ║ ║ ║╠═╝ ║║║║ ║║║║ │ │││││ ├┤ └─┐└─┐ ╠╣ ║╣ ║ ║ ╠═╣ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘
// ╚═╝ ╩ ╚═╝╩ ╝╚╝╚═╝╚╩╝ooo └─┘┘└┘┴─┘└─┘└─┘└─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴
// ┬ ┬┌─┐┌─┐ ┌─┐┌─┐┌┬┐ ┌┬┐┌─┐ ┌┬┐┬─┐┬ ┬┌─┐
// │││├─┤└─┐ └─┐├┤ │ │ │ │ │ ├┬┘│ │├┤
// └┴┘┴ ┴└─┘ └─┘└─┘ ┴ ┴ └─┘ ┴ ┴└─└─┘└─┘
// If `fetch` was not enabled, return.
var fetch = modifiedMeta || (_.has(query.meta, 'fetch') && query.meta.fetch);
if (!fetch) {
// > Note: This `if` statement is a convenience, for cases where the result from
// > the adapter may have been coerced from `undefined` to `null` automatically.
// > (we want it to be `undefined` still, for consistency)
if (_.isNull(rawAdapterResult)) {
return done();
}//-•
if (!_.isUndefined(rawAdapterResult)) {
console.warn('\n'+
'Warning: Unexpected behavior in database adapter:\n'+
'Since `fetch` is NOT enabled, this adapter (for datastore `'+WLModel.datastore+'`)\n'+
'should NOT have sent back anything as the 2nd argument when triggering the callback\n'+
'from its `createEach` method. But it did -- which is why this warning is being displayed:\n'+
'to help avoid confusion and draw attention to the bug. Specifically, got:\n'+
util.inspect(rawAdapterResult, {depth:5})+'\n'+
'(Ignoring it and proceeding anyway...)'+'\n'
);
}//>-
return done();
}//-•
// IWMIH then we know that `fetch: true` meta key was set, and so the
// adapter should have sent back an array.
// ╔╦╗╦═╗╔═╗╔╗╔╔═╗╔═╗╔═╗╦═╗╔╦╗ ┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐ ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐
// ║ ╠╦╝╠═╣║║║╚═╗╠╣ ║ ║╠╦╝║║║ ├─┤ ││├─┤├─┘ │ ├┤ ├┬┘ ├┬┘├┤ └─┐│ ││ │
// ╩ ╩╚═╩ ╩╝╚╝╚═╝╚ ╚═╝╩╚═╩ ╩ ┴ ┴─┴┘┴ ┴┴ ┴ └─┘┴└─ ┴└─└─┘└─┘└─┘┴─┘┴
// Attempt to convert the records' column names to attribute names.
var transformationErrors = [];
var transformedRecords = [];
_.each(rawAdapterResult, function(record) {
var transformedRecord;
try {
transformedRecord = WLModel._transformer.unserialize(record);
} catch (e) {
transformationErrors.push(e);
}
transformedRecords.push(transformedRecord);
});
if (transformationErrors.length > 0) {
return done(new Error(
'Encountered '+transformationErrors.length+' error(s) processing the record(s) sent back '+
'from the adapter-- specifically, when converting column names back to attribute names. '+
'Details: '+
util.inspect(transformationErrors,{depth:5})+''
));
}//-•
// Check the record to verify compliance with the adapter spec,
// as well as any issues related to stale data that might not have been
// been migrated to keep up with the logical schema (`type`, etc. in
// attribute definitions).
try {
processAllRecords(transformedRecords, query.meta, WLModel.identity, orm);
} catch (e) { return done(e); }
// ┌─┐┌─┐┬ ┬ ╦═╗╔═╗╔═╗╦ ╔═╗╔═╗╔═╗ ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗╦╔═╗╔╗╔ ┌─┐┌─┐┬─┐
// │ ├─┤│ │ ╠╦╝║╣ ╠═╝║ ╠═╣║ ║╣ ║ ║ ║║ ║ ║╣ ║ ║ ║║ ║║║║ ├┤ │ │├┬┘
// └─┘┴ ┴┴─┘┴─┘ ╩╚═╚═╝╩ ╩═╝╩ ╩╚═╝╚═╝ ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ ╩╚═╝╝╚╝ └ └─┘┴└─
// ┌─┐─┐ ┬┌─┐┬ ┬┌─┐┬┌┬┐┬ ┬ ┬ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐┌┬┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
// ├┤ ┌┴┬┘├─┘│ ││ │ │ │ └┬┘───└─┐├─┘├┤ │ │├┤ │├┤ ││ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐
// └─┘┴ └─┴ ┴─┘┴└─┘┴ ┴ ┴─┘┴ └─┘┴ └─┘└─┘┴└ ┴└─┘─┴┘ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘
var argsForEachReplaceOp = [];
_.each(transformedRecords, function (record, idx) {
// Grab the dictionary of collection resets corresponding to this record.
var reset = allCollectionResets[idx];
// If there are no resets, then there's no need to build up a replaceCollection() query.
if (_.keys(reset).length === 0) {
return;
}//-•
// Otherwise, build an array of arrays, where each sub-array contains
// the first three arguments that need to be passed in to `replaceCollection()`.
var targetIds = [ record[WLModel.primaryKey] ];
_.each(_.keys(reset), function (collectionAttrName) {
// (targetId(s), collectionAttrName, associatedPrimaryKeys)
argsForEachReplaceOp.push([
targetIds,
collectionAttrName,
reset[collectionAttrName]
]);
});// </ each key in "reset" >
});//</ each record>
async.each(argsForEachReplaceOp, function _eachReplaceCollectionOp(argsForReplace, next) {
// Note that, by using the same `meta`, we use same db connection
// (if one was explicitly passed in, anyway)
WLModel.replaceCollection(argsForReplace[0], argsForReplace[1], argsForReplace[2], function(err) {
if (err) { return next(err); }
return next();
}, query.meta);
},// ~∞%°
function _afterReplacingAllCollections(err) {
if (err) {
return done(err);
}
// ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─
// ╠═╣╠╣ ║ ║╣ ╠╦╝ │ ├┬┘├┤ ├─┤ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐
// ╩ ╩╚ ╩ ╚═╝╩╚═ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴
(function _maybeRunAfterLC(proceed){
// If the `skipAllLifecycleCallbacks` meta flag was set, don't run the LC.
if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) {
return proceed(undefined, transformedRecords);
}//-•
// If no afterCreate callback defined, just proceed.
if (!_.has(WLModel._callbacks, 'afterCreate')) {
return proceed(undefined, transformedRecords);
}//-•
async.each(transformedRecords, WLModel._callbacks.afterCreate, function(err) {
if (err) {
return proceed(err);
}
return proceed(undefined, transformedRecords);
});
})(function _afterPotentiallyRunningAfterLC(err, transformedRecords) {
if (err) { return done(err); }
// Return the new record.
return done(undefined, transformedRecords);
});//</ ran "after" lifecycle callback, maybe >
});//</async.each()>
});//</adapter.createEach()>
});
},
explicitCbMaybe,
_.extend(DEFERRED_METHODS, {
// Provide access to this model for use in query modifier methods.
_WLModel: WLModel,
// Set up initial query metadata.
_wlQueryInfo: query,
})
);//</parley>
};

createEach()
Create a set of records in the database.
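A minimal usage sketch (with a hypothetical `User` model and `fullName` attribute). Note that,
per the code above, no records are sent back to the callback unless the `fetch` meta key is enabled:
```
User.createEach([
  { fullName: 'Finn Mertens' },
  { fullName: 'Jake the Dog' }
])
.exec(function(err) {
  if (err) { /* handle the error */ return; }
  // Both records now exist in the database.
});
```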
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Usage without deferred object:
================================================
@param {Array?} newRecords
@param {Function?} explicitCbMaybe
Callback function to run when query has either finished successfully or errored.
(If unspecified, will return a Deferred object instead of actually doing anything.)
@param {Ref?} meta
For internal use.
@returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Function: createEach()  (JavaScript)
Repo: balderdashy/waterline
Path: lib/waterline/methods/create-each.js
License: MIT
Source: https://github.com/balderdashy/waterline/blob/master/lib/waterline/methods/create-each.js

module.exports = function create(newRecord, explicitCbMaybe, metaContainer) {
// Verify `this` refers to an actual Sails/Waterline model.
verifyModelMethodContext(this);
// Set up a few, common local vars for convenience / familiarity.
var WLModel = this;
var orm = this.waterline;
var modelIdentity = this.identity;
// Build an omen for potential use in the asynchronous callback below.
var omen = buildOmen(create);
// Build initial query.
var query = {
method: 'create',
using: modelIdentity,
newRecord: newRecord,
meta: metaContainer
};
// ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗
// ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝
// ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗
// ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║
// ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║
// ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝
//
// FUTURE: when time allows, update this to match the "VARIADICS" format
// used in the other model methods.
// ██████╗ ███████╗███████╗███████╗██████╗
// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗
// ██║ ██║█████╗ █████╗ █████╗ ██████╔╝
// ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗
// ██████╔╝███████╗██║ ███████╗██║ ██║
// ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝
//
// ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗
// ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗
// ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║
// ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║
// ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝
// ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝
//
// ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐
// ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││
// └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘
// ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐
// │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│
// └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘
// If a callback function was not specified, then build a new Deferred and bail now.
//
// > This method will be called AGAIN automatically when the Deferred is executed.
// > and next time, it'll have a callback.
return parley(
function (done){
// Otherwise, IWMIH, we know that a callback was specified.
// So...
// ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗
// ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝
// █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗
// ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝
// ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗
// ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝
//
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴
//
// Forge a stage 2 query (aka logical protostatement)
// This ensures a normalized format.
try {
forgeStageTwoQuery(query, orm);
} catch (e) {
switch (e.code) {
case 'E_INVALID_NEW_RECORD':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'Invalid new record.\n'+
'Details:\n'+
' '+e.details+'\n'
}, omen)
);
default:
return done(e);
}
}
// ╔╗ ╔═╗╔═╗╔═╗╦═╗╔═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─
// ╠╩╗║╣ ╠╣ ║ ║╠╦╝║╣ │ ├┬┘├┤ ├─┤ │ ├┤ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐
// ╚═╝╚═╝╚ ╚═╝╩╚═╚═╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴
// Determine what to do about running "before" lifecycle callbacks
(function _maybeRunBeforeLC(proceed){
// If the `skipAllLifecycleCallbacks` meta key was enabled, then don't run this LC.
if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) {
return proceed(undefined, query);
}//-•
// If there is no relevant "before" lifecycle callback, then just proceed.
if (!_.has(WLModel._callbacks, 'beforeCreate')) {
return proceed(undefined, query);
}//-•
// IWMIH, run the "before" lifecycle callback.
WLModel._callbacks.beforeCreate(query.newRecord, function(err){
if (err) { return proceed(err); }
return proceed(undefined, query);
});
})(function _afterPotentiallyRunningBeforeLC(err, query) {
if (err) {
return done(err);
}
// ╔═╗╦ ╦╔═╗╔═╗╦╔═ ┌─┐┌─┐┬─┐ ┌─┐┌┐┌┬ ┬
// ║ ╠═╣║╣ ║ ╠╩╗ ├┤ │ │├┬┘ ├─┤│││└┬┘
// ╚═╝╩ ╩╚═╝╚═╝╩ ╩ └ └─┘┴└─ ┴ ┴┘└┘ ┴
// ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ ┬─┐┌─┐┌─┐┌─┐┌┬┐┌─┐
// │ │ ││ │ ├┤ │ │ ││ ││││ ├┬┘├┤ └─┐├┤ │ └─┐
// └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ ┴└─└─┘└─┘└─┘ ┴ └─┘
// Also removes them from the newRecord before sending to the adapter.
var collectionResets = {};
_.each(WLModel.attributes, function _eachKnownAttrDef(attrDef, attrName) {
if (attrDef.collection) {
// Only track a reset if the value isn't an empty array. If the value
// is an empty array there isn't any resetting to do.
if (query.newRecord[attrName].length > 0) {
collectionResets[attrName] = query.newRecord[attrName];
}
// Remove the collection value from the newRecord because the adapter
// doesn't need to do anything during the initial create.
delete query.newRecord[attrName];
}
});//</ each known attribute def >
// Hold a variable for the query's `meta` property, which could possibly be
// changed by us later on.
var modifiedMeta;
// If any collection resets were specified, force `fetch: true` (meta key)
// so that we can use it below.
if (_.keys(collectionResets).length > 0) {
// Build a modified shallow clone of the originally-provided `meta`
// that also has `fetch: true`.
modifiedMeta = _.extend({}, query.meta || {}, { fetch: true });
}//>-
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴
// Now, destructively forge this S2Q into a S3Q.
try {
query = forgeStageThreeQuery({
stageTwoQuery: query,
identity: modelIdentity,
transformer: WLModel._transformer,
originalModels: orm.collections
});
} catch (e) { return done(e); }
// ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗
// └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝
// └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═
// Grab the appropriate adapter method and call it.
var adapter = WLModel._adapter;
if (!adapter.create) {
return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.'));
}
// Allow the query to possibly use the modified meta
query.meta = modifiedMeta || query.meta;
// And call the adapter method.
adapter.create(WLModel.datastore, query, function _afterTalkingToAdapter(err, rawAdapterResult) {
if (err) {
err = forgeAdapterError(err, omen, 'create', modelIdentity, orm);
return done(err);
}//-•
// ╔═╗╔╦╗╔═╗╔═╗ ╔╗╔╔═╗╦ ╦ ┬ ┬┌┐┌┬ ┌─┐┌─┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬
// ╚═╗ ║ ║ ║╠═╝ ║║║║ ║║║║ │ │││││ ├┤ └─┐└─┐ ╠╣ ║╣ ║ ║ ╠═╣ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘
// ╚═╝ ╩ ╚═╝╩ ╝╚╝╚═╝╚╩╝ooo └─┘┘└┘┴─┘└─┘└─┘└─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴
// ┬ ┬┌─┐┌─┐ ┌─┐┌─┐┌┬┐ ┌┬┐┌─┐ ┌┬┐┬─┐┬ ┬┌─┐
// │││├─┤└─┐ └─┐├┤ │ │ │ │ │ ├┬┘│ │├┤
// └┴┘┴ ┴└─┘ └─┘└─┘ ┴ ┴ └─┘ ┴ ┴└─└─┘└─┘
// If `fetch` was not enabled, return.
var fetch = modifiedMeta || (_.has(query.meta, 'fetch') && query.meta.fetch);
if (!fetch) {
// > Note: This `if` statement is a convenience, for cases where the result from
// > the adapter may have been coerced from `undefined` to `null` automatically.
// > (we want it to be `undefined` still, for consistency)
if (_.isNull(rawAdapterResult)) {
return done();
}//-•
if (!_.isUndefined(rawAdapterResult)) {
console.warn('\n'+
'Warning: Unexpected behavior in database adapter:\n'+
'Since `fetch` is NOT enabled, this adapter (for datastore `'+WLModel.datastore+'`)\n'+
'should NOT have sent back anything as the 2nd argument when triggering the callback\n'+
'from its `create` method. But it did -- which is why this warning is being displayed:\n'+
'to help avoid confusion and draw attention to the bug. Specifically, got:\n'+
util.inspect(rawAdapterResult, {depth:5})+'\n'+
'(Ignoring it and proceeding anyway...)'+'\n'
);
}//>-
return done();
}//-•
// IWMIH then we know that `fetch: true` meta key was set, and so the
// adapter should have sent back an array.
// Sanity check:
if (!_.isObject(rawAdapterResult) || _.isArray(rawAdapterResult) || _.isFunction(rawAdapterResult)) {
return done(new Error('Consistency violation: expected `create` adapter method to send back the created record b/c `fetch: true` was enabled. But instead, got: ' + util.inspect(rawAdapterResult, {depth:5})+''));
}
// ╔╦╗╦═╗╔═╗╔╗╔╔═╗╔═╗╔═╗╦═╗╔╦╗ ┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐ ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐
// ║ ╠╦╝╠═╣║║║╚═╗╠╣ ║ ║╠╦╝║║║ ├─┤ ││├─┤├─┘ │ ├┤ ├┬┘ ├┬┘├┤ └─┐│ ││ │
// ╩ ╩╚═╩ ╩╝╚╝╚═╝╚ ╚═╝╩╚═╩ ╩ ┴ ┴─┴┘┴ ┴┴ ┴ └─┘┴└─ ┴└─└─┘└─┘└─┘┴─┘┴
// Attempt to convert the record's column names to attribute names.
var transformedRecord;
try {
transformedRecord = WLModel._transformer.unserialize(rawAdapterResult);
} catch (e) { return done(e); }
// Check the record to verify compliance with the adapter spec,
// as well as any issues related to stale data that might not have been
// been migrated to keep up with the logical schema (`type`, etc. in
// attribute definitions).
try {
processAllRecords([ transformedRecord ], query.meta, modelIdentity, orm);
} catch (e) { return done(e); }
// ┌─┐┌─┐┬ ┬ ╦═╗╔═╗╔═╗╦ ╔═╗╔═╗╔═╗ ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗╦╔═╗╔╗╔ ┌─┐┌─┐┬─┐
// │ ├─┤│ │ ╠╦╝║╣ ╠═╝║ ╠═╣║ ║╣ ║ ║ ║║ ║ ║╣ ║ ║ ║║ ║║║║ ├┤ │ │├┬┘
// └─┘┴ ┴┴─┘┴─┘ ╩╚═╚═╝╩ ╩═╝╩ ╩╚═╝╚═╝ ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ ╩╚═╝╝╚╝ └ └─┘┴└─
// ┌─┐─┐ ┬┌─┐┬ ┬┌─┐┬┌┬┐┬ ┬ ┬ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐┌┬┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
// ├┤ ┌┴┬┘├─┘│ ││ │ │ │ └┬┘───└─┐├─┘├┤ │ │├┤ │├┤ ││ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐
// └─┘┴ └─┴ ┴─┘┴└─┘┴ ┴ ┴─┘┴ └─┘┴ └─┘└─┘┴└ ┴└─┘─┴┘ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘
var targetId = transformedRecord[WLModel.primaryKey];
async.each(_.keys(collectionResets), function _eachReplaceCollectionOp(collectionAttrName, next) {
WLModel.replaceCollection(targetId, collectionAttrName, collectionResets[collectionAttrName], function(err){
if (err) { return next(err); }
return next();
}, query.meta);
},// ~∞%°
function _afterReplacingAllCollections(err) {
if (err) { return done(err); }
// ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─
// ╠═╣╠╣ ║ ║╣ ╠╦╝ │ ├┬┘├┤ ├─┤ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐
// ╩ ╩╚ ╩ ╚═╝╩╚═ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴
(function _maybeRunAfterLC(proceed){
// If the `skipAllLifecycleCallbacks` meta flag was set, don't run the LC.
if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) {
return proceed(undefined, transformedRecord);
}//-•
// If no afterCreate callback defined, just proceed.
if (!_.has(WLModel._callbacks, 'afterCreate')) {
return proceed(undefined, transformedRecord);
}//-•
// Otherwise, run it.
return WLModel._callbacks.afterCreate(transformedRecord, function(err) {
if (err) {
return proceed(err);
}
return proceed(undefined, transformedRecord);
});
})(function _afterPotentiallyRunningAfterLC(err, transformedRecord) {
if (err) { return done(err); }
// Return the new record.
return done(undefined, transformedRecord);
});//</ ran "after" lifecycle callback, maybe >
});//</ async.each() (calling replaceCollection() for each explicitly-specified plural association) >
});//</ adapter.create() >
});//</ ran "before" lifecycle callback, maybe >
},
explicitCbMaybe,
_.extend(DEFERRED_METHODS, {
// Provide access to this model for use in query modifier methods.
_WLModel: WLModel,
// Set up initial query metadata.
_wlQueryInfo: query,
})
);//</parley>
};

create()
Create a new record using the specified initial values.
```
// Create a new bank account with a half million dollars,
// and associate it with the logged in user.
BankAccount.create({
balance: 500000,
owner: req.session.userId
})
.exec(function(err) {
// ...
});
```
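To get the newly-created record back, the `fetch` meta key must be enabled (see the
adapter-result handling in the code above). A sketch assuming the standard `.fetch()`
query modifier:
```
BankAccount.create({
  balance: 500000,
  owner: req.session.userId
})
.fetch()
.exec(function(err, newAccount) {
  if (err) { /* handle the error */ return; }
  // `newAccount` is the record that was just inserted.
});
```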
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Usage without deferred object:
================================================
@param {Dictionary?} newRecord
@param {Function?} explicitCbMaybe
Callback function to run when query has either finished successfully or errored.
(If unspecified, will return a Deferred object instead of actually doing anything.)
@param {Ref?} meta
For internal use.
@returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
The underlying query keys:
==============================
@qkey {Dictionary?} newRecord
@qkey {Dictionary?} meta
@qkey {String} using
@qkey {String} method
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

Function: create(newRecord, explicitCbMaybe, metaContainer)  (JavaScript)
Repo: balderdashy/waterline
Path: lib/waterline/methods/create.js
License: MIT
Source: https://github.com/balderdashy/waterline/blob/master/lib/waterline/methods/create.js

module.exports = function addToCollection(/* targetRecordIds, collectionAttrName, associatedIds?, explicitCbMaybe?, meta? */) {
// Verify `this` refers to an actual Sails/Waterline model.
verifyModelMethodContext(this);
// Set up a few, common local vars for convenience / familiarity.
var WLModel = this;
var orm = this.waterline;
var modelIdentity = this.identity;
// Build an omen for potential use in the asynchronous callback below.
var omen = buildOmen(addToCollection);
// Build query w/ initial, universal keys.
var query = {
method: 'addToCollection',
using: modelIdentity
};
// ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗
// ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝
// ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗
// ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║
// ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║
// ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝
//
// Handle the various supported usage possibilities
// (locate the `explicitCbMaybe` callback, and extend the `query` dictionary)
// The `explicitCbMaybe` callback, if one was provided.
var explicitCbMaybe;
// Handle the various supported usage possibilities
// (locate the `explicitCbMaybe` callback)
//
// > Note that we define `args` so that we can insulate access
// > to the arguments provided to this function.
var args = arguments;
(function _handleVariadicUsage(){
// The metadata container, if one was provided.
var _meta;
// Handle first two arguments:
// (both of which always have exactly one meaning)
//
// • addToCollection(targetRecordIds, collectionAttrName, ...)
query.targetRecordIds = args[0];
query.collectionAttrName = args[1];
// Handle double meaning of third argument, & then handle the rest:
//
// • addToCollection(____, ____, associatedIds, explicitCbMaybe, _meta)
var is3rdArgArray = !_.isUndefined(args[2]);
if (is3rdArgArray) {
query.associatedIds = args[2];
explicitCbMaybe = args[3];
_meta = args[4];
}
// • addToCollection(____, ____, explicitCbMaybe, _meta)
else {
explicitCbMaybe = args[2];
_meta = args[3];
}
// Fold in `_meta`, if relevant.
if (!_.isUndefined(_meta)) {
query.meta = _meta;
} // >-
})();
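// For example (hypothetical models/attributes -- not from the original source), the mapping
// above supports invocations like:
//
//   • User.addToCollection([3, 4], 'pets', [99, 98])
//     (no callback provided -- a Deferred is built and returned below; call `.exec()` on it)
//   • User.addToCollection([3, 4], 'pets', [99, 98], function(err){ /* ... */ })
//   • User.addToCollection([3, 4], 'pets').members([99, 98]).exec(function(err){ /* ... */ })
//     (associated ids provided later via the `.members()` query modifier)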
// ██████╗ ███████╗███████╗███████╗██████╗
// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗
// ██║ ██║█████╗ █████╗ █████╗ ██████╔╝
// ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗
// ██████╔╝███████╗██║ ███████╗██║ ██║
// ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝
//
// ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗
// ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗
// ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║
// ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║
// ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝
// ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝
//
// ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐
// ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││
// └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘
// ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐
// │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│
// └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘
// If an explicit callback function was specified, then immediately run the logic below
// and trigger the explicit callback when the time comes. Otherwise, build and return
// a new Deferred now. (If/when the Deferred is executed, the logic below will run.)
return parley(
function (done){
// Otherwise, IWMIH, we know that it's time to actually do some stuff.
// So...
//
// ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗
// ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝
// █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗
// ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝
// ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗
// ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴
//
// Forge a stage 2 query (aka logical protostatement)
try {
forgeStageTwoQuery(query, orm);
} catch (e) {
switch (e.code) {
case 'E_INVALID_TARGET_RECORD_IDS':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'The target record ids (i.e. first argument) passed to `.addToCollection()` '+
'should be the ID (or IDs) of target records whose collection will be modified.\n'+
'Details:\n'+
' ' + e.details + '\n'
}, omen)
);
case 'E_INVALID_COLLECTION_ATTR_NAME':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'The collection attr name (i.e. second argument) to `.addToCollection()` should '+
'be the name of a collection association from this model.\n'+
'Details:\n'+
' ' + e.details + '\n'
}, omen)
);
case 'E_INVALID_ASSOCIATED_IDS':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'The associated ids (i.e. using `.members()`, or the third argument) passed to `.addToCollection()` should be '+
'the ID (or IDs) of associated records to add.\n'+
'Details:\n'+
' ' + e.details + '\n'
}, omen)
);
case 'E_NOOP':
return done();
// ^ tolerate no-ops -- i.e. empty array of target record ids or empty array of associated ids (members)
case 'E_INVALID_META':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message: e.message
}, omen)
);
// ^ when the standard usage error message is good enough as-is, without any further customization
default:
return done(e);
// ^ when an internal, miscellaneous, or unexpected error occurs
}
} // >-•
// ┌┐┌┌─┐┬ ┬ ╔═╗╔═╗╔╦╗╦ ╦╔═╗╦ ╦ ╦ ╦ ┌┬┐┌─┐┬ ┬┌─ ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌┬┐┌┐ ┌─┐
// ││││ ││││ ╠═╣║ ║ ║ ║╠═╣║ ║ ╚╦╝ │ ├─┤│ ├┴┐ │ │ │ │ ├─┤├┤ ││├┴┐└─┐
// ┘└┘└─┘└┴┘ ╩ ╩╚═╝ ╩ ╚═╝╩ ╩╩═╝╩═╝╩ ┴ ┴ ┴┴─┘┴ ┴ ┴ └─┘ ┴ ┴ ┴└─┘ ─┴┘└─┘└─┘
(function (proceed){
// Get the model being used as the parent
var WLModel = orm.collections[query.using];
assert.equal(query.using.toLowerCase(), query.using, '`query.using` (identity) should have already been normalized before getting here! But it was not: '+query.using);
// Look up the association by name in the schema definition.
var schemaDef = WLModel.schema[query.collectionAttrName];
// Look up the associated collection using the schema def which should have
// join tables normalized
var WLChild = orm.collections[schemaDef.collection];
assert.equal(schemaDef.collection.toLowerCase(), schemaDef.collection, '`schemaDef.collection` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.collection);
assert.equal(schemaDef.referenceIdentity.toLowerCase(), schemaDef.referenceIdentity, '`schemaDef.referenceIdentity` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.referenceIdentity);
assert.equal(Object.getPrototypeOf(WLChild).identity.toLowerCase(), Object.getPrototypeOf(WLChild).identity, '`Object.getPrototypeOf(WLChild).identity` (identity) should have already been normalized before getting here! But it was not: '+Object.getPrototypeOf(WLChild).identity);
// Flag to determine if the WLChild is a manyToMany relation
var manyToMany = false;
// Check if the schema references something other than the WLChild
if (schemaDef.referenceIdentity !== Object.getPrototypeOf(WLChild).identity) {
manyToMany = true;
WLChild = orm.collections[schemaDef.referenceIdentity];
}
// Check if the child is a join table
if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) {
manyToMany = true;
}
// Check if the child is a through table
if (_.has(Object.getPrototypeOf(WLChild), 'throughTable') && _.keys(WLChild.throughTable).length) {
manyToMany = true;
}
// Ensure the query skips lifecycle callbacks
// Build a modified shallow clone of the originally-provided `meta`
var modifiedMeta = _.extend({}, query.meta || {}, { skipAllLifecycleCallbacks: true });
// ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ ████████╗ ██████╗ ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗
// ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ ╚══██╔══╝██╔═══██╗ ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝
// ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ ██║ ██║ ██║ ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝
// ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ ██║ ██║ ██║ ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝
// ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ ██║ ╚██████╔╝ ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║
// ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝
//
// If the collection uses a join table, build a query that inserts the records
// into the table.
if (manyToMany) {
// ╔╗ ╦ ╦╦╦ ╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌─┐┌┐┌┌─┐┌─┐ ┌┬┐┌─┐┌─┐┌─┐┬┌┐┌┌─┐
// ╠╩╗║ ║║║ ║║ ├┬┘├┤ ├┤ ├┤ ├┬┘├┤ ││││ ├┤ │││├─┤├─┘├─┘│││││ ┬
// ╚═╝╚═╝╩╩═╝═╩╝ ┴└─└─┘└ └─┘┴└─└─┘┘└┘└─┘└─┘ ┴ ┴┴ ┴┴ ┴ ┴┘└┘└─┘
//
// Maps out the parent and child attribute names to use for the query.
var parentReference;
var childReference;
// Find the parent reference
if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) {
// Assumes the generated junction table will only ever have two foreign key
// values. Should be safe for now and any changes would need to be made in
// Waterline-Schema where a map could be formed anyway.
_.each(WLChild.schema, function(wlsAttrDef, key) {
if (!_.has(wlsAttrDef, 'references')) {
return;
}
// If this is the piece of the join table, set the parent reference.
if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName === schemaDef.on) {
parentReference = key;
}
});
}
//‡
// If it's a through table, grab the parent and child reference from the
// through table mapping that was generated by Waterline-Schema.
else if (_.has(Object.getPrototypeOf(WLChild), 'throughTable')) {
childReference = WLChild.throughTable[WLModel.identity + '.' + query.collectionAttrName];
_.each(WLChild.throughTable, function(rhs, key) {
if (key !== WLModel.identity + '.' + query.collectionAttrName) {
parentReference = rhs;
}
});
}
// Find the child reference in a junction table
if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) {
// Assumes the generated junction table will only ever have two foreign key
// values. Should be safe for now and any changes would need to be made in
// Waterline-Schema where a map could be formed anyway.
_.each(WLChild.schema, function(wlsAttrDef, key) {
if (!_.has(wlsAttrDef, 'references')) {
return;
}
// If this is the other piece of the join table, set the child reference.
if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName !== schemaDef.on) {
childReference = key;
}
});
}
// FUTURE: If anonymous junction model's primary key attribute is explicitly
// required, then this isn't going to work, because we're specifying
// a value for the primary key for the new junction records we're creating.
// We could, in waterline-schema (or possibly in sails-hook-orm or maybe
// even in Waterline core?), automatically un-require-ify the primary key
// attribute for anonymous junction models.
// > See https://github.com/balderdashy/sails/issues/4591 for background.
//
// But for now we just do this:
if (WLChild.junctionTable || WLChild.throughTable) {
if (WLChild.schema.id) {
if (WLChild.schema.id.required) {
throw new Error(
'Cannot add to the collection for this many-to-many association because the anonymous '+
'junction model\'s "id" (primary key) is required. This might mean that the default id '+
'in this app\'s `config/models.js` file makes all primary keys required. For more info, '+
'see https://github.com/balderdashy/sails/issues/4591. If you are unsure, check out '+
'https://sailsjs.com/support for help.'
);
}
} else {
// FUTURE: Maybe be smarter about this instead of just checking for `id`
// For now, we just ignore it and let the error happen.
}
}//fi
// ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴
// Build an array to hold all the records being inserted
var joinRecords = [];
// For each target record, build an insert query for the associated records.
_.each(query.targetRecordIds, function(targetId) {
_.each(query.associatedIds, function(associatedId) {
var record = {};
record[parentReference] = targetId;
record[childReference] = associatedId;
joinRecords.push(record);
});
});
// ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
WLChild.createEach(joinRecords, proceed, modifiedMeta);
return;
}//-•
// ██████╗ ███████╗██╗ ██████╗ ███╗ ██╗ ██████╗ ███████╗ ████████╗ ██████╗
// ██╔══██╗██╔════╝██║ ██╔═══██╗████╗ ██║██╔════╝ ██╔════╝ ╚══██╔══╝██╔═══██╗
// ██████╔╝█████╗ ██║ ██║ ██║██╔██╗ ██║██║ ███╗███████╗ ██║ ██║ ██║
// ██╔══██╗██╔══╝ ██║ ██║ ██║██║╚██╗██║██║ ██║╚════██║ ██║ ██║ ██║
// ██████╔╝███████╗███████╗╚██████╔╝██║ ╚████║╚██████╔╝███████║ ██║ ╚██████╔╝
// ╚═════╝ ╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝ ╚══════╝ ╚═╝ ╚═════╝
//
// Otherwise the child records need to be updated to reflect the new foreign
// key value. Because in this case the targetRecordIds **should** only be a
// single value, just an update here should do the trick.
// ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴
// Build up a search criteria
var criteria = {
where: {}
};
criteria.where[WLChild.primaryKey] = query.associatedIds;
// Build up the values to update
var valuesToUpdate = {};
valuesToUpdate[schemaDef.via] = _.first(query.targetRecordIds);
// ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
WLChild.update(criteria, valuesToUpdate, proceed, modifiedMeta);
})(function (err) {
if (err) { return done(err); }
// IWMIH, everything worked!
// > Note that we do not send back a result of any kind-- this is to reduce the likelihood
// > of writing userland code that relies on undocumented/experimental output.
return done();
});//</ self-calling function (actually talk to the dbs) >
},
explicitCbMaybe,
_.extend(DEFERRED_METHODS, {
// Provide access to this model for use in query modifier methods.
_WLModel: WLModel,
// Set up initial query metadata.
_wlQueryInfo: query,
})
);//</parley>
}; | addToCollection()
Add new child records to the specified collection in each of the target record(s).
```
// For users 3 and 4, add pets 99 and 98 to the "pets" collection.
// > (if either user record already has one of those pets in its "pets",
// > then we just silently skip over it)
User.addToCollection([3,4], 'pets', [99,98]).exec(...);
```
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Usage without deferred object:
================================================
@param {Array?|String?|Number?} targetRecordIds
@param {String?} collectionAttrName
@param {Array?} associatedIds
@param {Function?} explicitCbMaybe
Callback function to run when query has either finished successfully or errored.
(If unspecified, will return a Deferred object instead of actually doing anything.)
@param {Ref?} meta
For internal use.
@returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
The underlying query keys:
==============================
@qkey {Array|String|Number} targetRecordIds
The primary key value(s) (i.e. ids) for the parent record(s).
Must be a number or string; e.g. '507f191e810c19729de860ea' or 49
Or an array of numbers or strings; e.g. ['507f191e810c19729de860ea', '14832ace0c179de897'] or [49, 32, 37]
If an empty array (`[]`) is specified, then this is a no-op.
@qkey {String} collectionAttrName
The name of the collection association (e.g. "pets")
@qkey {Array} associatedIds
The primary key values (i.e. ids) for the child records to add.
Must be an array of numbers or strings; e.g. ['334724948aca33ea0f13', '913303583e0af031358bac931'] or [18, 19]
If an empty array (`[]`) is specified, then this is a no-op.
@qkey {Dictionary?} meta
@qkey {String} using
@qkey {String} method
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | addToCollection ( ) | javascript | balderdashy/waterline | lib/waterline/methods/add-to-collection.js | https://github.com/balderdashy/waterline/blob/master/lib/waterline/methods/add-to-collection.js | MIT |
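For reference, a minimal usage sketch of the call styles accepted above; the `User` model and its plural "pets" association are assumptions carried over from the docstring example.
```
// A hedged sketch, assuming a Sails/Waterline app where `User` has a plural
// "pets" collection association (as in the docstring example above).
async function demo() {
  // Deferred usage: awaiting (or calling .exec() on) the Deferred runs the query.
  await User.addToCollection([3, 4], 'pets', [99, 98]);

  // Explicit-callback usage: the query runs immediately, no Deferred is returned.
  User.addToCollection(3, 'pets', [99], function (err) {
    if (err) { return console.error(err); }
    // Success: no result value is sent back (the output is intentionally undocumented).
  });

  // Empty arrays of target ids or associated ids are tolerated as no-ops (E_NOOP).
  await User.addToCollection([], 'pets', [99, 98]);
}
```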
module.exports = function removeFromCollection(/* targetRecordIds?, collectionAttrName?, associatedIds?, explicitCbMaybe?, meta? */) {
// Verify `this` refers to an actual Sails/Waterline model.
verifyModelMethodContext(this);
// Set up a few, common local vars for convenience / familiarity.
var WLModel = this;
var orm = this.waterline;
var modelIdentity = this.identity;
// Build an omen for potential use in the asynchronous callback below.
var omen = buildOmen(removeFromCollection);
// Build query w/ initial, universal keys.
var query = {
method: 'removeFromCollection',
using: modelIdentity
};
// ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗
// ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝
// ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗
// ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║
// ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║
// ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝
//
// Handle the various supported usage possibilities
// (locate the `explicitCbMaybe` callback, and extend the `query` dictionary)
// The `explicitCbMaybe` callback, if one was provided.
var explicitCbMaybe;
// Handle the various supported usage possibilities
// (locate the `explicitCbMaybe` callback)
//
// > Note that we define `args` so that we can insulate access
// > to the arguments provided to this function.
var args = arguments;
(function _handleVariadicUsage(){
// The metadata container, if one was provided.
var _meta;
// Handle first two arguments:
// (both of which always have exactly one meaning)
//
// • removeFromCollection(targetRecordIds, collectionAttrName, ...)
query.targetRecordIds = args[0];
query.collectionAttrName = args[1];
// Handle double meaning of third argument, & then handle the rest:
//
// • removeFromCollection(____, ____, associatedIds, explicitCbMaybe, _meta)
var is3rdArgArray = !_.isUndefined(args[2]);
if (is3rdArgArray) {
query.associatedIds = args[2];
explicitCbMaybe = args[3];
_meta = args[4];
}
// • removeFromCollection(____, ____, explicitCbMaybe, _meta)
else {
explicitCbMaybe = args[2];
_meta = args[3];
}
// Fold in `_meta`, if relevant.
if (!_.isUndefined(_meta)) {
query.meta = _meta;
} // >-
})();
// ██████╗ ███████╗███████╗███████╗██████╗
// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗
// ██║ ██║█████╗ █████╗ █████╗ ██████╔╝
// ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗
// ██████╔╝███████╗██║ ███████╗██║ ██║
// ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝
//
// ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗
// ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗
// ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║
// ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║
// ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝
// ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝
//
// ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐
// ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││
// └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘
// ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐
// │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│
// └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘
// If an explicit callback function was specified, then immediately run the logic below
// and trigger the explicit callback when the time comes. Otherwise, build and return
// a new Deferred now. (If/when the Deferred is executed, the logic below will run.)
return parley(
function (done){
// Otherwise, IWMIH, we know that it's time to actually do some stuff.
// So...
//
// ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗
// ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝
// █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗
// ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝
// ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗
// ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝
// ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴
//
// Forge a stage 2 query (aka logical protostatement)
try {
forgeStageTwoQuery(query, orm);
} catch (e) {
switch (e.code) {
case 'E_INVALID_TARGET_RECORD_IDS':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'The target record ids (i.e. first argument) passed to `.removeFromCollection()` '+
'should be the ID (or IDs) of target records whose collection will be modified.\n'+
'Details:\n'+
' ' + e.details + '\n'
}, omen)
);
case 'E_INVALID_COLLECTION_ATTR_NAME':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'The collection attr name (i.e. second argument) to `.removeFromCollection()` should '+
'be the name of a collection association from this model.\n'+
'Details:\n'+
' ' + e.details + '\n'
}, omen)
);
case 'E_INVALID_ASSOCIATED_IDS':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message:
'The associated ids (i.e. using `.members()`, or the third argument) passed to `.removeFromCollection()` should be '+
'the ID (or IDs) of associated records to remove.\n'+
'Details:\n'+
' ' + e.details + '\n'
}, omen)
);
case 'E_NOOP':
return done();
// ^ tolerate no-ops -- i.e. empty array of target record ids or empty array of associated ids (members)
case 'E_INVALID_META':
return done(
flaverr({
name: 'UsageError',
code: e.code,
details: e.details,
message: e.message
}, omen)
);
// ^ when the standard usage error message is good enough as-is, without any further customization
default:
return done(e);
// ^ when an internal, miscellaneous, or unexpected error occurs
}
} // >-•
// ┌┐┌┌─┐┬ ┬ ╔═╗╔═╗╔╦╗╦ ╦╔═╗╦ ╦ ╦ ╦ ┌┬┐┌─┐┬ ┬┌─ ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌┬┐┌┐ ┌─┐
// ││││ ││││ ╠═╣║ ║ ║ ║╠═╣║ ║ ╚╦╝ │ ├─┤│ ├┴┐ │ │ │ │ ├─┤├┤ ││├┴┐└─┐
// ┘└┘└─┘└┴┘ ╩ ╩╚═╝ ╩ ╚═╝╩ ╩╩═╝╩═╝╩ ┴ ┴ ┴┴─┘┴ ┴ ┴ └─┘ ┴ ┴ ┴└─┘ ─┴┘└─┘└─┘
(function (proceed) {
// Get the model being used as the parent
var WLModel = orm.collections[query.using];
try { assert.equal(query.using.toLowerCase(), query.using, '`query.using` (identity) should have already been normalized before getting here! But it was not: '+query.using); } catch (e) { return proceed(e); }
// Look up the association by name in the schema definition.
var schemaDef = WLModel.schema[query.collectionAttrName];
// Look up the associated collection using the schema def which should have
// join tables normalized
var WLChild = orm.collections[schemaDef.collection];
try {
assert.equal(schemaDef.collection.toLowerCase(), schemaDef.collection, '`schemaDef.collection` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.collection);
assert.equal(schemaDef.referenceIdentity.toLowerCase(), schemaDef.referenceIdentity, '`schemaDef.referenceIdentity` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.referenceIdentity);
assert.equal(Object.getPrototypeOf(WLChild).identity.toLowerCase(), Object.getPrototypeOf(WLChild).identity, '`Object.getPrototypeOf(WLChild).identity` (identity) should have already been normalized before getting here! But it was not: '+Object.getPrototypeOf(WLChild).identity);
} catch (e) { return proceed(e); }
// Flag to determine if the WLChild is a manyToMany relation
var manyToMany = false;
// Check if the schema references something other than the WLChild
if (schemaDef.referenceIdentity !== Object.getPrototypeOf(WLChild).identity) {
manyToMany = true;
WLChild = orm.collections[schemaDef.referenceIdentity];
}
// Check if the child is a join table
if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) {
manyToMany = true;
}
// Check if the child is a through table
if (_.has(Object.getPrototypeOf(WLChild), 'throughTable') && _.keys(WLChild.throughTable).length) {
manyToMany = true;
}
// Ensure the query skips lifecycle callbacks
// Build a modified shallow clone of the originally-provided `meta`
var modifiedMeta = _.extend({}, query.meta || {}, { skipAllLifecycleCallbacks: true });
// ██╗███╗ ██╗ ███╗ ███╗██╗
// ██╔╝████╗ ██║ ████╗ ████║╚██╗
// ██║ ██╔██╗ ██║ ██╔████╔██║ ██║
// ██║ ██║╚██╗██║ ██║╚██╔╝██║ ██║
// ╚██╗██║ ╚████║██╗██╗██║ ╚═╝ ██║██╔╝
// ╚═╝╚═╝ ╚═══╝╚═╝╚═╝╚═╝ ╚═╝╚═╝
//
// ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ ████████╗ ██████╗ ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗
// ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ ╚══██╔══╝██╔═══██╗ ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝
// ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ ██║ ██║ ██║ ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝
// ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ ██║ ██║ ██║ ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝
// ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ ██║ ╚██████╔╝ ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║
// ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝
//
// If the collection uses a join table, build a query that removes the records
// from the table.
if (manyToMany) {
// ╔╗ ╦ ╦╦╦ ╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌─┐┌┐┌┌─┐┌─┐ ┌┬┐┌─┐┌─┐┌─┐┬┌┐┌┌─┐
// ╠╩╗║ ║║║ ║║ ├┬┘├┤ ├┤ ├┤ ├┬┘├┤ ││││ ├┤ │││├─┤├─┘├─┘│││││ ┬
// ╚═╝╚═╝╩╩═╝═╩╝ ┴└─└─┘└ └─┘┴└─└─┘┘└┘└─┘└─┘ ┴ ┴┴ ┴┴ ┴ ┴┘└┘└─┘
//
// Maps out the parent and child attribute names to use for the query.
var parentReference;
var childReference;
// Find the parent reference
if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) {
// Assumes the generated junction table will only ever have two foreign key
// values. Should be safe for now and any changes would need to be made in
// Waterline-Schema where a map could be formed anyway.
_.each(WLChild.schema, function(wlsAttrDef, key) {
if (!_.has(wlsAttrDef, 'references')) {
return;
}
// If this is the piece of the join table, set the parent reference.
if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName === schemaDef.on) {
parentReference = key;
}
});
}
// If it's a through table, grab the parent and child reference from the
// through table mapping that was generated by Waterline-Schema.
else if (_.has(Object.getPrototypeOf(WLChild), 'throughTable')) {
childReference = WLChild.throughTable[WLModel.identity + '.' + query.collectionAttrName];
_.each(WLChild.throughTable, function(rhs, key) {
if (key !== WLModel.identity + '.' + query.collectionAttrName) {
parentReference = rhs;
}
});
}//>-
// Find the child reference in a junction table
if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) {
// Assumes the generated junction table will only ever have two foreign key
// values. Should be safe for now and any changes would need to be made in
// Waterline-Schema where a map could be formed anyway.
_.each(WLChild.schema, function(wlsAttrDef, key) {
if (!_.has(wlsAttrDef, 'references')) {
return;
}
// If this is the other piece of the join table, set the child reference.
if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName !== schemaDef.on) {
childReference = key;
}
});//</_.each()>
}//>-
// ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴ (S)
//
// If only a single targetRecordId is used, this can proceed in a single
// query. Otherwise multiple queries will be needed - one for each parent.
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// FUTURE: Combine this bit into one single query using something like:
// ```
// { or: [ { and: [{..},{..:{in:[..]}}] }, { and: [{..},{..:{in: [..]}}] }, ... ] }
// ```
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Build an array to hold `where` clauses for all records being removed.
// For each target record, build a constraint destroy query for the associated records.
var joinRecordWhereClauses = [];
_.each(query.targetRecordIds, function(targetId) {
var whereClauseForTarget = {};
whereClauseForTarget[parentReference] = targetId;
whereClauseForTarget[childReference] = { in: query.associatedIds };
joinRecordWhereClauses.push(whereClauseForTarget);
});
// ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
async.each(joinRecordWhereClauses, function(whereClause, next) {
WLChild.destroy(whereClause, function(err){
if (err) { return next(err); }
return next();
}, modifiedMeta);
},// ~∞%°
function _after(err) {
if (err) { return proceed(err); }
return proceed();
});//</ async.each() >
return;
}//_∏_. </ if this is a n..m (many to many) association >
// ██╗███╗ ██╗ ██╗██╗
// ██╔╝████╗ ██║ ███║╚██╗
// ██║ ██╔██╗ ██║ ╚██║ ██║
// ██║ ██║╚██╗██║ ██║ ██║
// ╚██╗██║ ╚████║██╗██╗██║██╔╝
// ╚═╝╚═╝ ╚═══╝╚═╝╚═╝╚═╝╚═╝
//
// ██████╗ ███████╗██╗ ██████╗ ███╗ ██╗ ██████╗ ███████╗ ████████╗ ██████╗
// ██╔══██╗██╔════╝██║ ██╔═══██╗████╗ ██║██╔════╝ ██╔════╝ ╚══██╔══╝██╔═══██╗
// ██████╔╝█████╗ ██║ ██║ ██║██╔██╗ ██║██║ ███╗███████╗ ██║ ██║ ██║
// ██╔══██╗██╔══╝ ██║ ██║ ██║██║╚██╗██║██║ ██║╚════██║ ██║ ██║ ██║
// ██████╔╝███████╗███████╗╚██████╔╝██║ ╚████║╚██████╔╝███████║ ██║ ╚██████╔╝
// ╚═════╝ ╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝ ╚══════╝ ╚═╝ ╚═════╝
//
// Otherwise, this association is exclusive-- so rather than deleting junction records, we'll need
// to update the child records themselves, nulling out their foreign key value (aka singular, "model", association).
// ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴
//
// Build up criteria that selects child records.
var criteria = { where: {} };
criteria.where[WLChild.primaryKey] = query.associatedIds;
criteria.where[schemaDef.via] = query.targetRecordIds;
// Build up the values to set (we'll null out the other side).
var valuesToUpdate = {};
valuesToUpdate[schemaDef.via] = null;
// ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
// ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
// ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
WLChild.update(criteria, valuesToUpdate, function(err){
if (err) { return proceed(err); }
return proceed();
}, modifiedMeta);//</.update()>
})(function (err) {
if (err) { return done(err); }
// IWMIH, everything worked!
// > Note that we do not send back a result of any kind-- this is to reduce the likelihood
// > of writing userland code that relies on undocumented/experimental output.
return done();
});//</ self-calling function (actually talk to the dbs) >
},
explicitCbMaybe,
_.extend(DEFERRED_METHODS, {
// Provide access to this model for use in query modifier methods.
_WLModel: WLModel,
// Set up initial query metadata.
_wlQueryInfo: query,
})
);//</parley>
}; | removeFromCollection()
Remove a subset of the members from the specified collection in each of the target record(s).
```
// For users 3 and 4, remove pets 99 and 98 from their "pets" collection.
// > (if either user record does not actually have one of those pets in its "pets",
// > then we just silently skip over it)
User.removeFromCollection([3,4], 'pets', [99,98]).exec(...);
```
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Usage without deferred object:
================================================
@param {Array?|String?|Number?} targetRecordIds
@param {String?} collectionAttrName
@param {Array?} associatedIds
@param {Function?} explicitCbMaybe
Callback function to run when query has either finished successfully or errored.
(If unspecified, will return a Deferred object instead of actually doing anything.)
@param {Ref?} meta
For internal use.
@returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
The underlying query keys:
==============================
@qkey {Array|String|Number} targetRecordIds
The primary key value(s) (i.e. ids) for the parent record(s).
Must be a number or string; e.g. '507f191e810c19729de860ea' or 49
Or an array of numbers or strings; e.g. ['507f191e810c19729de860ea', '14832ace0c179de897'] or [49, 32, 37]
If an empty array (`[]`) is specified, then this is a no-op.
@qkey {String} collectionAttrName
The name of the collection association (e.g. "pets")
@qkey {Array} associatedIds
The primary key values (i.e. ids) for the associated child records to remove from the collection.
Must be an array of numbers or strings; e.g. ['334724948aca33ea0f13', '913303583e0af031358bac931'] or [18, 19]
If an empty array (`[]`) is specified, then this is a no-op.
@qkey {Dictionary?} meta
@qkey {String} using
@qkey {String} method
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - | removeFromCollection ( ) | javascript | balderdashy/waterline | lib/waterline/methods/remove-from-collection.js | https://github.com/balderdashy/waterline/blob/master/lib/waterline/methods/remove-from-collection.js | MIT |
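For reference, a short sketch contrasting the two code paths taken above. `User`/"pets" (many-to-many) is carried over from the docstring example; the `Company` model with an "employees" collection whose children point back `via` a singular `company` attribute is a hypothetical used to show the exclusive path.
```
// A hedged sketch; the model names are illustrative, not from the Waterline source.
async function demo() {
  // Many-to-many: the junction records linking users 3 & 4 to pets 99 & 98 are destroyed.
  await User.removeFromCollection([3, 4], 'pets', [99, 98]);

  // Exclusive ("via" a singular attribute): the matching child records are updated
  // so their `company` foreign key is set to null -- nothing is deleted.
  await Company.removeFromCollection(7, 'employees', [21, 22]);
}
```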
module.exports = function() {
var context = structure;
// Name the collection
context.identity = 'foo';
context.primaryKey = 'id';
// Set collection attributes
context._attributes = {
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
name: { type: 'string' },
bars: {
collection: 'bar',
via: 'foo'
}
};
// Build a mock global schema object
context.waterline.schema = {
foo: {
identity: 'foo',
attributes: {
name: 'string',
bars: {
collection: 'bar',
references: 'bar',
on: 'foo_id',
onKey: 'foo'
},
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
}
}
},
bar: {
identity: 'bar',
attributes: {
name: 'string',
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
foo: {
columnName: 'foo_id',
type: 'integer',
foreignKey: true,
references: 'foo',
on: 'id',
onKey: 'id'
}
}
}
};
// Build global collections
context.waterline.collections.foo = {
identity: 'foo',
_attributes: context._attributes
};
context.waterline.collections.bar = {
identity: 'bar',
_attributes: {
name: { type: 'string' },
foo: { model: 'foo' },
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
}
}
};
return context;
}; | Context Fixture for a Belongs To Relationship | module.exports ( ) | javascript | balderdashy/waterline | test/support/fixtures/model/context.belongsTo.fixture.js | https://github.com/balderdashy/waterline/blob/master/test/support/fixtures/model/context.belongsTo.fixture.js | MIT |
module.exports = function() {
var context = structure;
context.identity = 'foo';
context.primaryKey = 'id';
context.connections = {
my_foo: {
config: {},
_adapter: {},
_collections: []
}
};
// Build Out Model Definitions
var models = {
foo: {
identity: 'foo',
datastore: 'my_foo',
attributes: {
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
name: {
type: 'string'
},
bars: {
collection: 'bar',
via: 'foos',
dominant: true
},
foobars: {
collection: 'baz' ,
via: 'foo',
dominant: true
}
}
},
bar: {
identity: 'bar',
datastore: 'my_foo',
attributes: {
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
name: {
type: 'string'
},
foos: {
collection: 'foo',
via: 'bars'
}
}
},
baz: {
identity: 'baz',
datastore: 'my_foo',
attributes: {
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
foo: {
model: 'foo'
}
}
},
bar_foos__foo_bars: {
identity: 'bar_foos__foo_bars',
datastore: 'my_foo',
tables: ['bar', 'foo'],
junctionTable: true,
attributes: {
id: {
primaryKey: true,
autoIncrement: true,
type: 'integer'
},
bar_foos: {
columnName: 'bar_foos',
type: 'integer',
foreignKey: true,
references: 'bar',
on: 'id',
via: 'foo_bars',
groupBy: 'bar'
},
foo_bars: {
columnName: 'foo_bars',
type: 'integer',
foreignKey: true,
references: 'foo',
on: 'id',
via: 'bar_foos',
groupBy: 'foo'
}
}
}
};
// Set context collections
context.waterline.collections = models;
// Set collection attributes
context._attributes = models.foo.attributes;
context.attributes = context._attributes;
context.waterline.connections = context.connections;
// Build Up Waterline Schema
context.waterline.schema.foo = {
identity: 'foo',
datastore: 'my_foo',
attributes: {
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
name: {
type: 'string'
},
bars: {
collection: 'bar_foos__foo_bars',
references: 'bar_foos__foo_bars',
on: 'bar_foos'
},
foobars: {
collection: 'baz',
references: 'baz',
on: 'foo_id'
}
}
};
context.waterline.schema.bar = {
identity: 'bar',
datastore: 'my_foo',
attributes: {
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
name: {
type: 'string'
},
foos: {
collection: 'bar_foos__foo_bars',
references: 'bar_foos__foo_bars',
on: 'foo_bars'
}
}
};
context.waterline.schema.baz = {
identity: 'baz',
datastore: 'my_foo',
attributes: {
id: {
type: 'integer',
autoIncrement: true,
primaryKey: true,
unique: true
},
foo: {
columnName: 'foo_id',
type: 'integer',
foreignKey: true,
references: 'foo',
on: 'id'
}
}
};
context.waterline.schema.bar_foos__foo_bars = {
identity: 'bar_foos__foo_bars',
datastore: 'my_foo',
tables: ['bar', 'foo'],
junctionTable: true,
attributes: {
id: {
primaryKey: true,
autoIncrement: true,
type: 'integer'
},
bar_foos: {
columnName: 'bar_foos',
type: 'integer',
foreignKey: true,
references: 'bar',
on: 'id',
via: 'foo_bars',
groupBy: 'bar'
},
foo_bars: {
columnName: 'foo_bars',
type: 'integer',
foreignKey: true,
references: 'foo',
on: 'id',
via: 'bar_foos',
groupBy: 'foo'
}
}
};
return context;
}; | Context Fixture for a Many To Many Relationship | module.exports ( ) | javascript | balderdashy/waterline | test/support/fixtures/model/context.manyToMany.fixture.js | https://github.com/balderdashy/waterline/blob/master/test/support/fixtures/model/context.manyToMany.fixture.js | MIT |
before(function() {
var collections = [];
collections.push(Waterline.Model.extend({
identity: 'customer',
tableName: 'customer',
primaryKey: 'uuid',
attributes: {
uuid: {
type: 'string'
}
}
}));
collections.push(Waterline.Model.extend({
identity: 'foo',
tableName: 'foo',
primaryKey: 'id',
attributes: {
id: {
type: 'number'
},
customer: {
model: 'customer'
}
}
}));
var schema = new Schema(collections);
transformer = new Transformer(schema.foo.attributes, schema.schema);
}); | Build up real waterline schema for accurate testing | (anonymous) ( ) | javascript | balderdashy/waterline | test/unit/collection/transformations/transformations.serialize.js | https://github.com/balderdashy/waterline/blob/master/test/unit/collection/transformations/transformations.serialize.js | MIT |
const handleReject = reason => {
const rejectedAction = getAction(reason, true);
dispatch(rejectedAction);
throw reason;
}; | Function: handleReject
Calls: getAction to construct the rejected action
Description: This function dispatches the rejected action and re-throws
the original Error object. Please note the developer is responsible
for constructing and throwing an Error object. The middleware does not
construct any Errors. | handleReject | javascript | pburtchaell/redux-promise-middleware | src/index.js | https://github.com/pburtchaell/redux-promise-middleware/blob/master/src/index.js | MIT |
const handleFulfill = (value = null) => {
const resolvedAction = getAction(value, false);
dispatch(resolvedAction);
return { value, action: resolvedAction };
}; | Function: handleFulfill
Calls: getAction to construct the fullfilled action
Description: This function dispatches the fulfilled action and
returns the success object. The success object should
contain the value and the dispatched action. | handleFulfill | javascript | pburtchaell/redux-promise-middleware | src/index.js | https://github.com/pburtchaell/redux-promise-middleware/blob/master/src/index.js | MIT |
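Taken together, the two handlers above are what userland code sees when dispatching a promise action. A hedged sketch follows; the store setup, the `FETCH_USER` type, and `fetchUserFromApi()` are illustrative assumptions.
```
// The middleware settles through handleFulfill/handleReject, so the value
// returned by dispatch() is itself a promise.
const action = { type: 'FETCH_USER', payload: fetchUserFromApi() };

store.dispatch(action)
  .then(({ value, action }) => {
    // handleFulfill ran: a FETCH_USER_FULFILLED action was dispatched with `value`.
    console.log(action.type, value);
  })
  .catch(reason => {
    // handleReject ran: FETCH_USER_REJECTED was dispatched and the Error re-thrown.
    console.error(reason);
  });
```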
function source(re) {
if (!re) return null;
if (typeof re === "string") return re;
return re.source;
} | @param {RegExp | string } re
@returns {string} | source ( re ) | javascript | react-syntax-highlighter/react-syntax-highlighter | demo/build/react-syntax-highlighter_languages_highlight_latex.js | https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/demo/build/react-syntax-highlighter_languages_highlight_latex.js | MIT |
function either(...args) {
const joined = '(' + args.map((x) => source(x)).join("|") + ")";
return joined;
} | Any of the passed expressions may match
Creates a huge this | this | that | that match
@param {(RegExp | string)[] } args
@returns {string} | either ( ... args ) | javascript | react-syntax-highlighter/react-syntax-highlighter | demo/build/react-syntax-highlighter_languages_highlight_latex.js | https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/demo/build/react-syntax-highlighter_languages_highlight_latex.js | MIT |
function concat(...args) {
const joined = args.map((x) => source(x)).join("");
return joined;
} | @param {...(RegExp | string) } args
@returns {string} | concat ( ... args ) | javascript | react-syntax-highlighter/react-syntax-highlighter | demo/build/react-syntax-highlighter_languages_highlight_cLike.js | https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/demo/build/react-syntax-highlighter_languages_highlight_cLike.js | MIT |
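A small sketch of how the three helpers above compose (the pattern pieces are illustrative):
```
const NUMBER = /\d+(\.\d+)?/;
const UNIT = either('px', 'em', /rem/);   // => "(px|em|rem)"
const LENGTH = concat(NUMBER, UNIT);      // => "\d+(\.\d+)?(px|em|rem)"

console.log(source(/foo/));                     // "foo"
console.log(new RegExp(LENGTH).test('12.5em')); // true
```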
const hasClosingTag = (match, { after }) => {
const tag = "</" + match[0].slice(1);
const pos = match.input.indexOf(tag, after);
return pos !== -1;
}; | Takes a string like "<Booger" and checks to see
if we can find a matching "</Booger" later in the
content.
@param {RegExpMatchArray} match
@param {{after:number}} param1 | hasClosingTag | javascript | react-syntax-highlighter/react-syntax-highlighter | demo/build/vendors~react-syntax-highlighter_languages_highlight_typescript.js | https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/demo/build/vendors~react-syntax-highlighter_languages_highlight_typescript.js | MIT |
isTrulyOpeningTag: (match, response) => {
const afterMatchIndex = match[0].length + match.index;
const nextChar = match.input[afterMatchIndex];
// nested type?
// HTML should not include another raw `<` inside a tag
// But a type might: `<Array<Array<number>>`, etc.
if (nextChar === "<") {
response.ignoreMatch();
return;
}
// <something>
// This is now either a tag or a type.
if (nextChar === ">") {
// if we cannot find a matching closing tag, then we
// will ignore it
if (!hasClosingTag(match, { after: afterMatchIndex })) {
response.ignoreMatch();
}
}
} | @param {RegExpMatchArray} match
@param {CallbackResponse} response | isTrulyOpeningTag | javascript | react-syntax-highlighter/react-syntax-highlighter | demo/build/vendors~react-syntax-highlighter_languages_highlight_typescript.js | https://github.com/react-syntax-highlighter/react-syntax-highlighter/blob/master/demo/build/vendors~react-syntax-highlighter_languages_highlight_typescript.js | MIT |
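A hedged sketch of `hasClosingTag` in isolation; the real calls come from highlight.js's regex engine, so the match objects here are simulated with `String.prototype.match`:
```
const jsx = '<Booger prop={1}>hi</Booger>'.match(/<[A-Za-z]\w*/);
console.log(hasClosingTag(jsx, { after: jsx.index + jsx[0].length }));   // true  -- "</Booger" exists

const type = '<Array<Array<number>>'.match(/<[A-Za-z]\w*/);
console.log(hasClosingTag(type, { after: type.index + type[0].length })); // false -- no "</Array"

// isTrulyOpeningTag combines this check with the "<" / ">" look-ahead to call
// response.ignoreMatch() for generic type parameters instead of JSX tags.
```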
exports.default = function notarizing(context) {
return context;
}; | Use electron-notarize to notarize app
@description The app should be notarized after it has been signed and before it’s packaged into a dmg.
Electron-builder has a hook for this called afterSign. You can link that to a javascript file that will be called (and waited for) after sign.
You add it to your top level “build” configuration
@link https://kilianvalkhof.com/2019/electron/notarizing-your-electron-application/ | notarizing ( context ) | javascript | Postcatlab/postcat | scripts/notarize.js | https://github.com/Postcatlab/postcat/blob/master/scripts/notarize.js | Apache-2.0 |
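The hook above is currently a pass-through. For context, a hedged sketch of what the afterSign hook described in the docstring commonly looks like; the `electron-notarize` call, env var names, and bundle id are assumptions, not Postcat's implementation.
```
const { notarize } = require('electron-notarize');

exports.default = async function notarizing(context) {
  const { electronPlatformName, appOutDir } = context;
  if (electronPlatformName !== 'darwin') return context; // only macOS builds get notarized

  const appName = context.packager.appInfo.productFilename;
  await notarize({
    appBundleId: 'com.example.app',            // assumption: your app's bundle id
    appPath: `${appOutDir}/${appName}.app`,
    appleId: process.env.APPLE_ID,
    appleIdPassword: process.env.APPLE_ID_PASSWORD
  });
  return context;
};
```
It is wired up via `"afterSign": "scripts/notarize.js"` inside the top-level "build" configuration mentioned above.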
module.exports = (config, options) => {
config.target = 'electron-renderer';
if (options.fileReplacements) {
for (let fileReplacement of options.fileReplacements) {
if (fileReplacement.replace !== 'src/environments/environment.ts') {
continue;
}
let fileReplacementParts = fileReplacement['with'].split('.');
if (fileReplacementParts.length > 1 && ['dev'].indexOf(fileReplacementParts[1]) >= 0) {
config.target = 'web';
}
break;
}
}
config.plugins = [
...config.plugins,
new NodePolyfillPlugin({
excludeAliases: ['console']
})
];
config.module.rules = [
// {
// test: /\.(js|ts)$/,
// use: [
// {
// loader: 'babel-loader',
// options: {
// presets: ['@babel/preset-env', 'stage-3'],
// plugins: ['@babel/plugin-syntax-import-assertions']
// }
// }
// ],
// exclude: /node_modules/,
// resourceQuery: { not: [/\?ngResource/] }
// },
{
test: /\.css$/,
use: ['style-loader', 'css-loader'],
resourceQuery: { not: [/\?ngResource/] }
},
{
test: /\.ttf$/,
type: 'asset/resource',
resourceQuery: { not: [/\?ngResource/] }
},
{
// Files ending in .md are handled by the markdown-loader rule
test: /\.md$/,
use: ['html-loader', './markdown-loader']
},
...config.module.rules
];
Object.assign(config, {
experiments: {
topLevelAwait: true
}
});
// console.log('config', config.module.rules);
return config;
}; | Custom angular webpack configuration | module.exports | javascript | Postcatlab/postcat | src/browser/angular.webpack.js | https://github.com/Postcatlab/postcat/blob/master/src/browser/angular.webpack.js | Apache-2.0 |
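A quick sketch of the exported function's behavior (the mock config and replacement path are illustrative):
```
const customWebpack = require('./angular.webpack');

const cfg = customWebpack(
  { plugins: [], module: { rules: [] } },
  {
    fileReplacements: [
      { replace: 'src/environments/environment.ts', with: 'src/environments/environment.dev.ts' }
    ]
  }
);

console.log(cfg.target);                    // 'web' -- the ".dev" replacement marks a serve/browser build
console.log(cfg.experiments.topLevelAwait); // true
```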
privateFun.parseRequestDataToObj = inputData => {
let tmpOutputObj = {
restParams: inputData.restParams,
queryParams: inputData.queryParams,
requestBody: null
},
tmpRequestBody = inputData.requestBody.body;
try {
switch (inputData.requestType) {
case '2': {
tmpRequestBody = JSON.parse(tmpRequestBody);
break;
}
case '3': {
xml2json.parseString(
tmpRequestBody,
{
explicitArray: false,
ignoreAttrs: true
},
function (error, result) {
if (!error) {
tmpRequestBody = result;
}
}
);
break;
}
case '1': {
tmpOutputObj.raw = tmpRequestBody;
tmpRequestBody = {
raw: tmpRequestBody
};
break;
}
case '4': {
tmpOutputObj.binary = tmpRequestBody;
tmpRequestBody = {
binary: tmpRequestBody
};
break;
}
}
} catch (PARSE_REQUEST_BODY_DATA_ERR) {
console.error(new Date() + ':PARSE_REQUEST_BODY_DATA_ERR::', PARSE_REQUEST_BODY_DATA_ERR);
}
tmpOutputObj.requestBody = tmpRequestBody;
return tmpOutputObj;
}; | @desc Parse the request info and assemble it into a suitable object
@param [object] inputData Raw object to be assembled
@return [object] | privateFun.parseRequestDataToObj | javascript | Postcatlab/postcat | src/node/test-server/request/libs/common.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/common.js | Apache-2.0 |
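A hedged input/output sketch for the JSON case (`requestType: '2'`); the field values are illustrative:
```
const out = privateFun.parseRequestDataToObj({
  requestType: '2',
  requestBody: { body: '{"name":"postcat"}' },
  restParams: { id: 1 },
  queryParams: { page: 1 }
});
// out => {
//   restParams:  { id: 1 },
//   queryParams: { page: 1 },
//   requestBody: { name: 'postcat' }   // parsed from the JSON string
// }
```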
privateFun.mergeObj = (inputTargetItem, inputSourceItem) => {
let tmpOutputObj = privateFun.deepCopy(inputTargetItem);
for (let key in inputSourceItem) {
if (tmpOutputObj[key] || tmpOutputObj[key] === 0 || tmpOutputObj[key] === null || tmpOutputObj[key] === false) continue;
tmpOutputObj[key] = inputSourceItem[key];
}
return tmpOutputObj;
}; | Merge object data
@param {object} inputTargetItem Target object
@param {object} inputSourceItem Source object
@returns {object} Merged output object | privateFun.mergeObj | javascript | Postcatlab/postcat | src/node/test-server/request/libs/common.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/common.js | Apache-2.0 |
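A small sketch of the merge semantics above: values already present on the target (including 0, null, and false) are kept, and only missing keys are copied from the source.
```
const merged = privateFun.mergeObj(
  { a: 0, b: null, c: undefined },   // target wins wherever it already has a value
  { a: 1, b: 2, c: 3, d: 4 }
);
// merged => { a: 0, b: null, c: 3, d: 4 }
```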
privateFun.resetEnv = (inputBaiscEnv, inputSanboxVar) => {
let tmpResult = Object.assign({}, inputBaiscEnv);
tmpResult.envParam = _LibsCommon.deepCopy(inputSanboxVar.envParam);
['http'].map(val => {
tmpResult[val] = {};
for (let itemKey in inputSanboxVar[val]) {
if (['extraFormDataParam', 'queryParam', 'headerParam', 'baseUrlParam', 'requestScript', 'responseScript'].indexOf(itemKey) > -1) {
tmpResult[val][itemKey] = inputSanboxVar[val][itemKey];
}
}
});
return tmpResult;
}; | @desc Reset the env
@param {object} inputSanboxVar env variables from the sandbox
@param {object} inputBaiscEnv Base env | privateFun.resetEnv | javascript | Postcatlab/postcat | src/node/test-server/request/libs/apiUtil.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/apiUtil.js | Apache-2.0 |
throw_err: tmpInputMsg => {
throw `codeError_${tmpInputMsg}`;
}, | Output an error message and stop executing any further code
@param {string} info Message body to output | throw_err | javascript | Postcatlab/postcat | src/node/test-server/request/libs/apiUtil.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/apiUtil.js | Apache-2.0 |
privateFun.parseBeforeCode = async function (scritEngines = 'pm', inputData, inputScript, inputOpts = {}) {
const tmpBasicEnv = inputData.env || _LibsCommon.parseEnv();
const tmpOutput = {
status: 'finish',
url: inputData.url,
headers: {},
params: null,
env: tmpBasicEnv,
reportList: []
};
let tmpTargetTypeData;
let tmpTargetTypeEnv;
let tmpEnviroments;
let tmpBinary = inputData.binary;
inputOpts.authInfo = inputOpts.authInfo || { authType: 'none', authInfo: {} };
switch (scritEngines) {
case 'pm': {
tmpTargetTypeEnv = {
baseUrlParam: tmpBasicEnv.http.baseUrlParam
};
//There is no need to execute pmRuntime while there is no script and no authInfo
if (!(inputScript || inputOpts.authInfo)) {
tmpTargetTypeData = {
apiUrl: inputData.url,
bodyParam: inputData.raw,
bodyParseParam: inputData.params,
queryParam: inputData.query,
headerParam: inputData.headers
};
break;
}
//Get runtime instance
const ctx = await pmRuntime.createContextAsync({ timeout: 10000, disableLegacyAPIs: true });
let tmpEnvGlobals = Object.assign({}, global.eoTestGlobals || {}, tmpBasicEnv.envParam || {});
const context = {
enviroment: [],
request: {
url: inputData.url,
method: {
0: 'POST',
1: 'GET',
2: 'PUT',
3: 'DELETE',
4: 'HEAD',
5: 'OPTIONS',
6: 'PATCH'
}[inputData.apiRequestType],
body: {},
auth: {
type: inputOpts.authInfo.authType,
[inputOpts.authInfo.authType]: Object.keys(inputOpts.authInfo.authInfo).map(keyName => ({
key: keyName,
type: 'any',
value: inputOpts.authInfo.authInfo[keyName]
}))
},
header: Object.keys(inputData.headers).map(keyName => ({ key: keyName, value: inputData.headers[keyName] }))
},
globals: Object.keys(tmpEnvGlobals).map(keyName => ({ key: keyName, type: 'any', value: tmpEnvGlobals[keyName] }))
};
switch (inputData.requestType) {
case '0': {
context.request.body.mode = 'urlencoded';
context.request.body.urlencoded = Object.keys(inputData.params).map(keyName => ({
key: keyName,
value: inputData.params[keyName]
}));
break;
}
default: {
context.request.body.mode = 'raw';
context.request.body.raw = inputData.raw;
break;
}
}
//TODO : no script,no execute
//Excute code in runtime
let [pmRes, err] = await pmRuntime.executeSync(ctx, inputScript, {
context: context
});
if (err) {
return Object.assign(tmpOutput, {
status: 'beforeCodeError',
content: err.message
});
}
//Parse authInfo to request data
if (inputOpts.authInfo) {
switch (inputOpts.authInfo.authType) {
case 'none': {
break;
}
default: {
//Get extension function
const [extensionCache, err] = await loadExtension({
name: inputOpts.authInfo.authType
});
if (err) {
console.error(`install auth extension code error: ${err}}`);
break;
}
const { extension, packageJson } = extensionCache;
if (!packageJson?.features?.authAPI) break;
//Prepare the auth info, e.g. by replacing global variables
let tmpEnvGlobals = Object.assign({}, global.eoTestGlobals || {}, tmpEnviroments || {});
let authInfo = pmRes.request.auth[pmRes.request.auth.type] || [];
let authInfoStr = JSON.stringify(authInfo);
for (const name in tmpEnvGlobals) {
const value = tmpEnvGlobals[name];
authInfoStr = _LibsCommon.replaceAll(`{{${name}}}`, value || '', authInfoStr);
}
try {
authInfo = JSON.parse(authInfoStr);
const { action } = packageJson.features.authAPI;
const func = extension[action];
const config = (authInfo || []).reduce((acc, cur) => ({ ...acc, [cur.key]: cur.value }), {});
//Execute at runtime
const code = await func(config);
const [authPmRes, err] = await pmRuntime.executeSync(ctx, code, {
context: pmRes
});
if (err) {
console.error(`execute auth extension code error: ${err}}`);
break;
}
pmRes = authPmRes;
} catch (e) {
console.error(`auth error: ${e.message}`);
}
break;
}
}
}
//Handle Request Data
const Url = require('postman-collection').Url;
const apiUrl = new Url(pmRes.request.url);
tmpTargetTypeData = {
apiUrl: apiUrl.toString(),
bodyParam: pmRes.request.body.raw,
bodyParseParam: (pmRes.request.body.urlencoded || []).reduce((acc, cur) => ({ ...acc, [cur.key]: cur.value }), {}),
queryParam: (pmRes.request.url.query || []).reduce((acc, cur) => ({ ...acc, [cur.key]: cur.value }), {}),
headerParam: (pmRes.request.header || []).reduce((acc, cur) => ({ ...acc, [cur.key]: cur.value }), {})
};
global.eoTestGlobals = (pmRes.globals.values || []).reduce((acc, cur) => ({ ...acc, [cur.key]: cur.value }), {});
//to fit eolink's format
tmpOutput.url = apiUrl.toString().split('?')[0];
break;
}
case 'vm': {
tmpApiType = inputOpts.type || 'http';
inputData = JSON.parse(JSON.stringify(inputData));
//!Can't delete,for eval warning tips
let tmpTitle = inputData.title || (inputData.isReturnSoonWhenExecCode ? '环境-API 前置脚本' : '');
let tmpErrorContent, tmpStatus;
let tmpSanboxObj = {
requestBody: inputData.requestBody || {},
requestHeaders: inputData.requestHeaders || {},
restParams: inputData.restParams || {},
queryParams: inputData.queryParams || {},
responseHeaders: inputData.responseHeaders || {},
response: inputData.response || {},
CryptoJS: CryptoJS,
$: $,
window: window,
document: document,
pc: {
info: (tmpInputMsg, tmpInputType) => {
let tmpInputMsgType = ['[object Date]'].includes(Object.prototype.toString.call(tmpInputMsg));
let tmpText;
try {
tmpText = tmpInputMsgType
? tmpInputMsg.toString()
: typeof tmpInputMsg === 'object'
? JSON.stringify(tmpInputMsg)
: tmpInputMsg;
} catch (JSON_STRINGIFY_ERROR) {
tmpText = tmpInputMsg.toString();
}
tmpOutput.reportList.push({
content: tmpText,
type: tmpInputType || 'throw'
});
},
error: tmpInputMsg => {
let tmpInputMsgType = ['[object Date]'].includes(Object.prototype.toString.call(tmpInputMsg));
let tmpText;
try {
tmpText = tmpInputMsgType
? tmpInputMsg.toString()
: typeof tmpInputMsg === 'object'
? JSON.stringify(tmpInputMsg)
: tmpInputMsg;
} catch (JSON_STRINGIFY_ERROR) {
tmpText = tmpInputMsg.toString();
}
tmpOutput.reportList.push({
content: tmpText,
type: 'assert_error'
});
tmpErrorContent = eval(global.eoLang['assertError']);
tmpStatus = 'assertError';
},
stop: tmpInputMsg => {
let tmpInputMsgType = ['[object Date]'].includes(Object.prototype.toString.call(tmpInputMsg));
let tmpText;
try {
tmpText = tmpInputMsgType
? tmpInputMsg.toString()
: typeof tmpInputMsg === 'object'
? JSON.stringify(tmpInputMsg)
: tmpInputMsg;
} catch (JSON_STRINGIFY_ERROR) {
tmpText = tmpInputMsg.toString();
}
tmpOutput.reportList.push({
content: tmpText,
type: 'interrupt'
});
throw 'interrupt';
}
}
};
const tmpVm = new NodeVM({
sandbox: tmpSanboxObj,
require: {
external: true,
builtin: ['crypto']
}
});
const tmpCodeEvalObj = tmpVm._context;
tmpCodeEvalObj.pc = privateFun.constructUiCodeBasicFn(tmpCodeEvalObj, tmpBasicEnv, inputOpts);
//Merge fucntion execute response and inputData
privateFun.setTypesRefFns(tmpCodeEvalObj.pc, inputData);
tmpTargetTypeData = tmpCodeEvalObj.pc[tmpApiType];
tmpTargetTypeEnv = tmpCodeEvalObj.pc.env[tmpApiType];
tmpEnviroments = tmpCodeEvalObj.pc.env.envParam;
let tmpNeedToExecRequestScript = tmpTargetTypeEnv.requestScript && !inputData.ingnoreRequestScript;
if (inputScript || tmpNeedToExecRequestScript) {
try {
// // execute common function
// if (inputOpts) {
// _LibsCommon.execFnDefine(inputOpts.functionCode || [], tmpVm, tmpCodeEvalObj.eo);
// }
if (!inputData.isReturnSoonWhenExecCode && tmpNeedToExecRequestScript) {
tmpNowIsExecuteEnvScript = true;
tmpVm.run(_LibsCommon.infiniteLoopDetector.wrap(tmpTargetTypeEnv.requestScript || '', 'pc.infiniteLoopDetector'));
}
tmpVm.run(_LibsCommon.infiniteLoopDetector.wrap(inputScript || '', 'pc.infiniteLoopDetector'));
} catch (Err) {
switch (Err) {
case 'info':
case 'interrupt':
case 'illegal':
case 'localhost':
case 'timeout': {
tmpStatus = 'terminateRequest';
switch (Err) {
case 'info': {
tmpStatus = 'info';
tmpErrorContent = 'pc.info 触发中断';
break;
}
case 'interrupt': {
tmpErrorContent = eval(global.eoLang['42c487b2-4b68-4dd1-834e-e1c978c8ea51']);
break;
}
default: {
tmpErrorContent = global.eoLang['d6fa1d73-6a43-477f-a6df-6752661c9df3'];
break;
}
}
break;
}
default: {
tmpStatus = 'beforeCodeError';
if (/^codeError_/.test(Err)) {
tmpErrorContent = Err.split('codeError_')[1];
} else {
let tmpErrParseObj = _LibsCommon.execCodeErrWarning(Err);
let tmpErrorLine = tmpErrParseObj.row,
tmpErrorColumn = tmpErrParseObj.col,
tmpFnName = tmpErrParseObj.fn; // Don't delete; needed when building the error message
tmpErrorContent = tmpFnName
? eval(`\`${global.eoLang['publicFnExecuteErrMsg']}\``)
: eval(`\`${global.eoLang['requestPreReduceErrMsg']}\``);
}
}
}
}
if (tmpStatus) {
return {
status: tmpStatus,
content: tmpErrorContent,
url: tmpTargetTypeData.url.parse(),
headers: tmpTargetTypeData.headerParam,
params: tmpTargetTypeData.bodyParseParam || tmpTargetTypeData.bodyParam,
env: privateFun.resetEnv(tmpBasicEnv, tmpCodeEvalObj.pc.env),
reportList: tmpOutput.reportList
};
}
}
tmpOutput.env = privateFun.resetEnv(tmpBasicEnv, tmpCodeEvalObj.pc.env);
if (inputData.isReturnSoonWhenExecCode) return tmpOutput;
break;
}
}
//Reuse eolink logic
let tmpParams, tmpHeaders;
try {
let tmp_query_param_obj = Object.assign({}, tmpTargetTypeEnv.queryParam, tmpTargetTypeData.queryParam);
tmpHeaders = Object.assign({}, tmpTargetTypeEnv.headerParam, tmpTargetTypeData.headerParam);
switch (inputData.requestType.toString()) {
case '0': {
tmpParams = _LibsCommon.mergeObj(tmpTargetTypeData.bodyParseParam, tmpTargetTypeEnv.extraFormDataParam);
break;
}
case '2': {
if (/^\[/.test(tmpTargetTypeData.bodyParam)) {
tmpParams = JSON.stringify([JSON.parse(tmpTargetTypeData.bodyParam)[0]]);
} else {
tmpParams = tmpTargetTypeData.bodyParam;
}
break;
}
case '3': {
/**
 * @desc The XML auto-completion of extra parameters has been removed
 */
tmpParams = _Xml_Class.jsonToXml()(JSON.parse(tmpTargetTypeData.bodyParam), inputData.xmlAttrObj);
break;
}
case '1': {
tmpParams =
typeof tmpTargetTypeData.bodyParam === 'string' ? tmpTargetTypeData.bodyParam : JSON.stringify(tmpTargetTypeData.bodyParam);
break;
}
}
let tmpEnvGlobals = Object.assign({}, global.eoTestGlobals || {}, tmpEnviroments || {});
tmpOutput.url = tmpTargetTypeData.apiUrl.split('?')[0];
for (let key in tmpEnvGlobals) {
let val = tmpEnvGlobals[key];
let templateParamObject = {};
let templateHeaderObject = {};
for (let tmp_query_param_key in tmp_query_param_obj) {
let tmp_query_param_val = _LibsCommon.replaceAll('{{' + key + '}}', val || '', tmp_query_param_obj[tmp_query_param_key]);
delete tmp_query_param_obj[tmp_query_param_key];
tmp_query_param_obj[_LibsCommon.replaceAll('{{' + key + '}}', val || '', tmp_query_param_key)] = tmp_query_param_val;
}
tmpOutput.url = _LibsCommon.replaceAll('{{' + key + '}}', val || '', tmpOutput.url);
for (let childKey in tmpHeaders) {
tmpHeaders[childKey] = _LibsCommon.replaceAll('{{' + key + '}}', val, tmpHeaders[childKey]);
if (childKey.indexOf('{{' + key + '}}') > -1) {
templateHeaderObject[_LibsCommon.replaceAll('{{' + key + '}}', val, childKey)] = tmpHeaders[childKey];
} else {
templateHeaderObject[childKey] = tmpHeaders[childKey];
}
}
tmpHeaders = templateHeaderObject;
if (!tmpBinary) {
switch (typeof tmpParams) {
case 'string': {
tmpParams = _LibsCommon.replaceAll('{{' + key + '}}', val, tmpParams);
break;
}
default: {
for (let childKey in tmpParams) {
switch (typeof tmpParams[childKey]) {
case 'string': {
tmpParams[childKey] = _LibsCommon.replaceAll('{{' + key + '}}', val, tmpParams[childKey]);
break;
}
default: {
for (let grandSonKey in tmpParams[childKey]) {
let grandSonVal = tmpParams[childKey][grandSonKey];
switch (typeof grandSonVal) {
case 'string': {
tmpParams[childKey][grandSonKey] = _LibsCommon.replaceAll('{{' + key + '}}', val, grandSonVal);
break;
}
}
}
break;
}
}
if (childKey.indexOf('{{' + key + '}}') > -1) {
let tmpHadReplaceString = _LibsCommon.replaceAll('{{' + key + '}}', val, childKey);
templateParamObject[tmpHadReplaceString] = tmpParams[childKey];
if (tmpParams[tmpHadReplaceString]) {
switch (_LibsCommon.getTypeOfVar(templateParamObject[tmpHadReplaceString])) {
case 'Array': {
if (_LibsCommon.getTypeOfVar(tmpParams[tmpHadReplaceString]) == 'Array') {
templateParamObject[tmpHadReplaceString] = templateParamObject[tmpHadReplaceString].concat(
tmpParams[tmpHadReplaceString]
);
} else {
templateParamObject[tmpHadReplaceString] = templateParamObject[tmpHadReplaceString].push(
tmpParams[tmpHadReplaceString]
);
}
break;
}
default: {
if (_LibsCommon.getTypeOfVar(tmpParams[tmpHadReplaceString]) == 'Array') {
templateParamObject[tmpHadReplaceString] = tmpParams[tmpHadReplaceString].push(
templateParamObject[tmpHadReplaceString]
);
} else {
templateParamObject[tmpHadReplaceString] = [
templateParamObject[tmpHadReplaceString],
tmpParams[tmpHadReplaceString]
];
}
break;
}
}
}
} else {
templateParamObject[childKey] = tmpParams[childKey];
}
}
tmpParams = templateParamObject;
}
}
}
}
tmpOutput.headers = tmpHeaders;
tmpOutput.queryParams = tmp_query_param_obj;
let queryString = querystring.stringify(tmpOutput.queryParams);
tmpOutput.url += queryString ? '?' + queryString : '';
for (let key in tmpTargetTypeData.restParam) {
tmpOutput.url = privateFun.replaceRestParam(key, tmpTargetTypeData.restParam[key], tmpOutput.url);
}
if (tmpBinary) {
tmpOutput.params = {};
} else {
tmpOutput.params = tmpParams;
}
if (!new RegExp(DOMAIN_REGEX).test(tmpOutput.url)) {
tmpOutput.url = (tmpTargetTypeEnv.baseUrlParam || '') + tmpOutput.url;
}
if (!/"content-type":/i.test(JSON.stringify(tmpOutput.headers))) {
switch (inputData.requestType.toString()) {
case '0': {
tmpOutput.headers['Content-Type'] = 'application/x-www-form-urlencoded';
break;
}
case '2': {
tmpOutput.headers['Content-Type'] = 'application/json';
break;
}
case '3': {
tmpOutput.headers['Content-Type'] = 'application/xml';
break;
}
case '4': {
if (/(data:)(.*)(;base64),/.test(tmpBinary)) {
tmpOutput.headers['Content-Type'] = RegExp.$2;
} else {
tmpOutput.headers['Content-Type'] = 'false';
}
break;
}
}
}
} catch (e) {
console.error(new Date() + ':libs/common.js 217:', e);
}
return tmpOutput;
}; | Pre-request script code
@param {string} inputData Request info whose fields can be assigned individually
@param {string} inputScript Pre-request script code
@param {object} inputOpts options
@return {object} Assembled pre-request information | privateFun.parseBeforeCode ( scritEngines = 'pm' , inputData , inputScript , inputOpts = { } ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/apiUtil.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/apiUtil.js | Apache-2.0 |
privateFun.aesOrDesAndEncryptOrDecrypt = (inputMode, inputData, inputKey, inputOpts) => {
if (inputOpts) {
inputOpts = Object.assign({}, inputOpts);
if (inputOpts.mode) inputOpts.mode = CryptoJS.mode[inputOpts.mode];
if (inputOpts.padding) inputOpts.padding = CryptoJS.pad[inputOpts.padding];
if (inputOpts.iv) inputOpts.iv = CryptoJS.enc.Latin1.parse(inputOpts.iv || '');
}
inputKey = CryptoJS.enc.Latin1.parse(inputKey || '');
let tmpType = inputMode.split('-')[0],
tmpOpr = inputMode.split('-')[1];
switch (tmpOpr) {
case 'decrypt': {
return CryptoJS[tmpType].decrypt(inputData, inputKey, inputOpts).toString(CryptoJS.enc.Utf8);
}
case 'encrypt': {
return CryptoJS[tmpType].encrypt(inputData, inputKey, inputOpts).toString();
}
}
}; | @desc AES/DES encryption and decryption
@param {string} inputMode mode selector, e.g. 'AES-encrypt', 'AES-decrypt', 'DES-encrypt' or 'DES-decrypt'
@param {string} inputKey encryption key
@param {string} inputData data to encrypt or decrypt
@param {object} inputOpts options: padding/iv/mode
@return {string} result string | privateFun.aesOrDesAndEncryptOrDecrypt | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.aesDecrypt = (inputData, inputKey, inputOpts) => {
return privateFun.aesOrDesAndEncryptOrDecrypt('AES-decrypt', inputData, inputKey, inputOpts);
}; | @desc AES decryption
@param {string} inputKey decryption key
@param {string} inputData data to decrypt
@param {object} inputOpts options: padding/iv/mode
@return {string} decrypted string | publicFun.aesDecrypt | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.aesEncrypt = (inputData, inputKey, inputOpts) => {
return privateFun.aesOrDesAndEncryptOrDecrypt('AES-encrypt', inputData, inputKey, inputOpts);
}; | @desc AES encryption
@param {string} inputKey encryption key
@param {string} inputData data to encrypt
@param {object} inputOpts options: padding/iv/mode
@return {string} encrypted string | publicFun.aesEncrypt | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
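A minimal round-trip sketch of how the two helpers above would be used; the require path and the key/iv values are assumptions for illustration, not part of the source:

const encrypt = require('./libs/encrypt');                       // assumed module path

const key = '0123456789abcdef';                                  // 16-byte Latin1 key
const opts = { mode: 'CBC', padding: 'Pkcs7', iv: 'fedcba9876543210' };

const cipherText = encrypt.aesEncrypt('hello world', key, opts); // base64 ciphertext
const plainText = encrypt.aesDecrypt(cipherText, key, opts);     // back to 'hello world'

console.log(cipherText, plainText);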
privateFun.createSign = function (encryption, message, secretKey, options) {
options = options || {
hash: 'base64'
};
let sign = Crypto.createSign(encryption);
sign.update(message || '', 'utf8');
try {
if (typeof secretKey === 'object' && typeof secretKey.padding === 'string') {
secretKey.padding = Crypto.constants[secretKey.padding];
}
return sign.sign(secretKey, options.hash);
} catch (e) {
return 'SignError';
}
}; | @description Shared signing helper (createSign)
@param [string] encryption signing algorithm
@param [string] message content to sign
@param [string] secretKey private key
@param [object] options optional configuration
@returns [string] signed content | privateFun.createSign ( encryption , message , secretKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
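For reference, a standalone sketch of what createSign delegates to in Node's crypto module; the key pair here is generated on the fly purely for illustration:

const Crypto = require('crypto');

// Throwaway RSA key pair, only for this example.
const { privateKey, publicKey } = Crypto.generateKeyPairSync('rsa', { modulusLength: 2048 });

const sign = Crypto.createSign('RSA-SHA256');
sign.update('payload to sign', 'utf8');
const signature = sign.sign(privateKey, 'base64');   // same shape as privateFun.createSign(..., { hash: 'base64' })

const verify = Crypto.createVerify('RSA-SHA256');
verify.update('payload to sign', 'utf8');
console.log(verify.verify(publicKey, signature, 'base64')); // true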
privateFun.createHash = function (encryption, message, options) {
options = options || {
hash: 'hex'
};
return Crypto.createHash(encryption)
.update(message || '')
.digest(options.hash);
}; | @description Shared hashing helper (createHash)
@param [string] encryption hash algorithm
@param [string] message content to hash
@param [object] options
@returns [string] hashed content | privateFun.createHash ( encryption , message , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
privateFun.createHmac = function (encryption, message, key, options) {
options = options || {
hash: 'hex'
};
return Crypto.createHmac(encryption, key || '')
.update(message || '')
.digest(options.hash);
}; | @description Shared HMAC helper (createHmac)
@param [string] encryption HMAC algorithm
@param [string] message content to digest
@returns [string] HMAC digest | privateFun.createHmac ( encryption , message , key , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
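The private helpers above are thin wrappers over Node's crypto primitives; a quick sketch of the equivalence (inputs are illustrative):

const Crypto = require('crypto');

// Equivalent to privateFun.createHash('sha256', 'abc')
const digest = Crypto.createHash('sha256').update('abc').digest('hex');

// Equivalent to privateFun.createHmac('sha256', 'abc', 'secret')
const mac = Crypto.createHmac('sha256', 'secret').update('abc').digest('hex');

console.log(digest, mac);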
publicFun.md5 = function (info) {
return privateFun.createHash('md5', info);
}; | MD5 digest
@param {string} info content to hash
return {string} MD5 digest of the input | publicFun.md5 ( info ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.HmacSHA1 = function (info, key, options) {
return privateFun.createHmac('sha1', info, key, options);
}; | HmacSHA1 digest
@param {string} info content to digest
@param {string} key secret key
@param {object} options configuration
return {string} HmacSHA1 digest of the input | publicFun.HmacSHA1 ( info , key , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.HmacSHA256 = function (info, key, options) {
return privateFun.createHmac('sha256', info, key, options);
}; | HmacSHA256 digest
@param {string} info content to digest
@param {string} key secret key
@param {object} options configuration
return {string} HmacSHA256 digest of the input | publicFun.HmacSHA256 ( info , key , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.HmacSHA224 = function (info, key, options) {
return privateFun.createHmac('sha224', info, key, options);
}; | HmacSHA224
@param {string} info content to digest
@param {string} key secret key
@param {object} options configuration
return {string} HmacSHA224 digest of the input | publicFun.HmacSHA224 ( info , key , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.HmacSHA384 = function (info, key, options) {
return privateFun.createHmac('sha384', info, key, options);
}; | HmacSHA384
@param {string} info content to digest
@param {string} key secret key
@param {object} options configuration
return {string} HmacSHA384 digest of the input | publicFun.HmacSHA384 ( info , key , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.HmacSHA512 = function (info, key, options) {
return privateFun.createHmac('sha512', info, key, options);
}; | HmacSHA512
@param {string} info content to digest
@param {string} key secret key
@param {object} options configuration
return {string} HmacSHA512 digest of the input | publicFun.HmacSHA512 ( info , key , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.sha1 = function (info, options) {
return privateFun.createHash('sha1', info, options);
}; | sha1 digest
@param {string} info content to hash
@param {object} options configuration
return {string} sha1 digest of the input | publicFun.sha1 ( info , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.sha256 = function (info, options) {
return privateFun.createHash('sha256', info, options);
}; | sha256 digest
@param {string} info content to hash
@param {object} options configuration
return {string} sha256 digest of the input | publicFun.sha256 ( info , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.sha224 = function (info, options) {
return privateFun.createHash('sha224', info, options);
}; | sha224
@param {string} info content to hash
@param {object} options configuration
return {string} sha224 digest of the input | publicFun.sha224 ( info , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.sha384 = function (info, options) {
return privateFun.createHash('sha384', info, options);
}; | sha384
@param {string} info content to hash
@param {object} options configuration
return {string} sha384 digest of the input | publicFun.sha384 ( info , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.sha512 = function (info, options) {
return privateFun.createHash('sha512', info, options);
}; | sha512
@param {string} info content to hash
@param {object} options configuration
return {string} sha512 digest of the input | publicFun.sha512 ( info , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
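Assuming the exports above, the digest and HMAC helpers can be exercised like this (the require path is an assumption):

const encrypt = require('./libs/encrypt');

console.log(encrypt.md5('hello'));                        // 32-char hex digest
console.log(encrypt.sha256('hello'));                     // 64-char hex digest
console.log(encrypt.HmacSHA1('hello', 'secret'));         // hex-encoded HMAC
console.log(encrypt.sha512('hello', { hash: 'base64' })); // base64 digest via the options hash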
publicFun.RS256 = function (info, privateKey, options) {
if (typeof options === 'string') {
options = {
hash: options
};
}
return privateFun.createSign('RSA-SHA256', info, privateKey, options);
}; | RS256
@param {string} info content to sign
@param {string} privateKey private key
@param {object} options configuration
return {string} RS256 signature of the input | publicFun.RS256 ( info , privateKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.RS384 = function (info, privateKey, options) {
return privateFun.createSign('RSA-SHA384', info, privateKey, options);
}; | RS384
@param {string} info content to sign
@param {string} privateKey private key
@param {object} options configuration
return {string} RS384 signature of the input | publicFun.RS384 ( info , privateKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.RS512 = function (info, privateKey, options) {
return privateFun.createSign('RSA-SHA512', info, privateKey, options);
}; | RS512
@param {string} info content to sign
@param {string} privateKey private key
@param {object} options configuration
return {string} RS512 signature of the input | publicFun.RS512 ( info , privateKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.RS1 = function (info, privateKey, options) {
if (typeof options === 'string') {
options = {
hash: options
};
}
return privateFun.createSign('RSA-SHA1', info, privateKey, options);
}; | RS1
@param {string} info content to sign
@param {string} privateKey private key
@param {object} options configuration
return {string} RSA-SHA1 signature of the input | publicFun.RS1 ( info , privateKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
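A hedged usage sketch for the RSA signing wrappers; the key pair is generated on the fly for illustration and the require path is an assumption:

const Crypto = require('crypto');
const encrypt = require('./libs/encrypt');   // assumed module path

const { privateKey } = Crypto.generateKeyPairSync('rsa', {
  modulusLength: 2048,
  privateKeyEncoding: { type: 'pkcs8', format: 'pem' }
});

// Default output is base64 (see privateFun.createSign); a digest format can also be passed as a string.
console.log(encrypt.RS256('payload', privateKey));
console.log(encrypt.RS1('payload', privateKey, 'hex'));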
publicFun.ES256 = function (info, privateKey, options) {
return privateFun.createEcdsa()('256', info, privateKey, options);
}; | ES256
@param {string} info content to sign
@param {string} privateKey private key
@param {object} options configuration
return {string} ES256 signature of the input | publicFun.ES256 ( info , privateKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.ES384 = function (info, privateKey, options) {
return privateFun.createEcdsa()('384', info, privateKey, options);
}; | ES384
@param {string} info content to sign
@param {string} privateKey private key
@param {object} options configuration
return {string} ES384 signature of the input | publicFun.ES384 ( info , privateKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
publicFun.ES512 = function (info, privateKey, options) {
return privateFun.createEcdsa()('512', info, privateKey, options);
}; | ES512
@param {string} info content to sign
@param {string} privateKey private key
@param {object} options configuration
return {string} ES512 signature of the input | publicFun.ES512 ( info , privateKey , options ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/encrypt.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/encrypt.js | Apache-2.0
eo_define_arr_to_json(input_eo_original_arr, input_parent_obj = {}, input_opts = {}, input_xml_attr_parent_obj = {}) {
let vm = this;
input_eo_original_arr.map(function (val) {
if (!val.paramKey || !val.checkbox) {
return;
}
try {
if (input_opts.needToParseBindData) {
val.paramKey = vm.construct_text_by_express_builder(vm.replace_text_from_bind_obj(val.paramKey, input_opts.bindObj));
val.paramInfo = vm.construct_text_by_express_builder(
vm.replace_text_from_bind_obj(val.paramInfo || val.paramValue || '', input_opts.bindObj)
);
if (input_opts.isXml)
val.attribute = vm.construct_text_by_express_builder(vm.replace_text_from_bind_obj(val.attribute || '', input_opts.bindObj));
} else {
val.paramKey = vm.construct_text_by_express_builder(val.paramKey);
val.paramInfo = vm.construct_text_by_express_builder(val.paramInfo || val.paramValue || '');
if (input_opts.isXml) val.attribute = vm.construct_text_by_express_builder(val.attribute || '');
}
} catch (BIND_PARSE_ERROR) {}
let tmp_value = (input_parent_obj[val.paramKey] = val.paramInfo || val.paramValue || '');
if (input_opts.isXml) input_xml_attr_parent_obj[val.paramKey] = val.attribute || '';
if (val.childList && val.childList.length > 0) {
if (input_opts.isXml) input_xml_attr_parent_obj[val.paramKey] = [input_xml_attr_parent_obj[val.paramKey], {}];
let tmp_cb_result;
switch (val.paramType.toString()) {
case '12': {
//array
if ((val.childList[0] || {}).isArrItem) {
                            //new data structure: array items are individual values
input_parent_obj[val.paramKey] = [];
                            tmp_cb_result = { has_text: true }; //set tmp_cb_result to mark that this array already has content, so no JSON.parse fallback is needed
val.childList.map((tmp_child_item, tmp_child_key) => {
if (!tmp_child_item.checkbox) return;
let tmp_item_parent_obj = {},
tmp_item_xml_attr_parent_obj = {};
if (tmp_child_item.paramType.toString() === '12' || !(tmp_child_item.childList && tmp_child_item.childList.length > 0)) {
vm.eo_define_arr_to_json(
[tmp_child_item],
tmp_item_parent_obj,
input_opts,
input_opts.isXml ? tmp_item_xml_attr_parent_obj : {}
);
tmp_item_parent_obj = tmp_item_parent_obj[tmp_child_item.paramKey];
tmp_item_xml_attr_parent_obj = tmp_item_xml_attr_parent_obj[tmp_child_item.paramKey];
} else {
vm.eo_define_arr_to_json(
tmp_child_item.childList,
tmp_item_parent_obj,
input_opts,
input_opts.isXml ? tmp_item_xml_attr_parent_obj : {}
);
}
input_parent_obj[val.paramKey].push(tmp_item_parent_obj);
if (input_opts.isXml) {
if (typeof input_xml_attr_parent_obj[val.paramKey][0] !== 'object') input_xml_attr_parent_obj[val.paramKey][0] = [];
input_xml_attr_parent_obj[val.paramKey][0].push(tmp_child_item.attribute || '');
input_xml_attr_parent_obj[val.paramKey].splice(tmp_child_key + 1, 1, tmp_item_xml_attr_parent_obj);
}
});
} else {
                            //legacy data: the first item does not have the isArrItem field
input_parent_obj[val.paramKey] = [{}];
tmp_cb_result = vm.eo_define_arr_to_json(
val.childList,
input_parent_obj[val.paramKey][0],
input_opts,
input_opts.isXml ? input_xml_attr_parent_obj[val.paramKey][1] : {}
);
}
break;
}
default: {
input_parent_obj[val.paramKey] = {};
tmp_cb_result = vm.eo_define_arr_to_json(
val.childList,
input_parent_obj[val.paramKey],
input_opts,
input_opts.isXml ? input_xml_attr_parent_obj[val.paramKey][1] : {}
);
break;
}
}
if (vm.check_empty_obj(tmp_cb_result)) {
try {
input_parent_obj[val.paramKey] = JSON.parse(tmp_value);
} catch (JSON_PARSE_ERROR) {
input_parent_obj[val.paramKey] = tmp_value;
}
}
} else {
let tmp_param_type = val.paramType.toString();
switch (tmp_param_type) {
case '0': {
                        //string
break;
}
case '15': {
//null
input_parent_obj[val.paramKey] = null;
}
case '14': {
//number
let tmp_num_text = input_parent_obj[val.paramKey] || vm.DEFAULT_REFS_FROM_EO_TYPE[tmp_param_type];
if (input_opts.isXml) {
input_parent_obj[val.paramKey] = `${tmp_num_text}`;
} else {
try {
if (JSON.parse(tmp_num_text) > Number.MAX_SAFE_INTEGER) {
input_parent_obj[val.paramKey] = `eo_big_int_${tmp_num_text}`;
} else {
input_parent_obj[val.paramKey] = JSON.parse(tmp_num_text);
}
} catch (JSON_PARSE_ERROR) {
input_parent_obj[val.paramKey] = `${tmp_num_text}`;
}
}
break;
}
default: {
                        //other types
let tmp_default_value = input_parent_obj[val.paramKey] || vm.DEFAULT_REFS_FROM_EO_TYPE[tmp_param_type];
try {
input_parent_obj[val.paramKey] = JSON.parse(tmp_default_value);
} catch (JSON_PARSE_ERROR) {
input_parent_obj[val.paramKey] = `${tmp_default_value}`;
}
break;
}
}
}
if (input_opts.isXml && input_opts.fnSetXmlAttr && val.attribute) {
input_opts.fnSetXmlAttr(val.paramKey, val.attribute);
}
});
return input_parent_obj;
} | @desc Convert the eolinker custom definition format to JSON
@param {array} input_eo_original_arr original eo definition list
@param {object} input_parent_obj parent object
@param {object} input_opts optional settings {needToParseBindData: whether to specially parse bound definition strings, isXml: whether the assembled content is XML, bindObj: binding object, fnSetXmlAttr: XML attribute handler function}
@param {object} input_xml_attr_parent_obj optional; only effective when the constructed object is XML, acts as the parent object XML attributes are inherited into
@return {object} constructed JSON object | eo_define_arr_to_json ( input_eo_original_arr , input_parent_obj = { } , input_opts = { } , input_xml_attr_parent_obj = { } ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/data_constructor.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/data_constructor.js | Apache-2.0
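A minimal sketch of the expected input and output shape, assuming the method is called on the data-constructor helper instance (which also supplies construct_text_by_express_builder, check_empty_obj, DEFAULT_REFS_FROM_EO_TYPE, etc.; those are not shown in this excerpt):

// Shape sketch only; the call itself would be helper.eo_define_arr_to_json(defineArr).
const defineArr = [
  { paramKey: 'name', paramType: '0', paramInfo: 'postcat', checkbox: true },
  {
    paramKey: 'tags', paramType: '12', checkbox: true,   // '12' => array
    childList: [{ paramKey: 'item', paramType: '0', paramInfo: 'api', checkbox: true, isArrItem: true }]
  }
];

// Expected result, roughly: { name: 'postcat', tags: ['api'] }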
const fnToBuffer = (inStr = {}) => {
let tmpStr;
if (typeof inStr === 'object') {
try {
tmpStr = JSON.stringify(inStr);
} catch (e) {
console.info('zlib.js fnToBuffer stringify error', inStr);
tmpStr = inStr.toString();
}
} else {
tmpStr = String(inStr);
}
return Buffer.from(tmpStr);
}; | @desc Convert the argument to Buffer data
@param {*} inStr
@returns | fnToBuffer | javascript | Postcatlab/postcat | src/node/test-server/request/libs/zlib.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/zlib.js | Apache-2.0 |
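Usage sketch for the helper above, assuming fnToBuffer is in scope (it is module-private in the source):

// Objects are JSON-stringified before conversion; everything else is coerced to a string.
const bufFromObj = fnToBuffer({ gzip: true, level: 6 }); // Buffer of '{"gzip":true,"level":6}'
const bufFromNum = fnToBuffer(42);                       // Buffer of '42'

console.log(bufFromObj.toString(), bufFromNum.toString());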
constructor(code, filename) {
this._code = String(code);
this.filename = filename || 'vm.js';
this._prefix = '';
this._suffix = '';
this._compiledVM = null;
this._compiledNodeVM = null;
} | Create VMScript instance.
@param {String} code Code to run.
@param {String} [filename] Filename that shows up in any stack traces produced from this script.
@return {VMScript} | constructor ( code , filename ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
wrap(prefix, suffix) {
const strPrefix = String(prefix);
const strSuffix = String(suffix);
if (this._prefix === strPrefix && this._suffix === strSuffix) return this;
this._prefix = strPrefix;
this._suffix = strSuffix;
this._compiledVM = null;
this._compiledNodeVM = null;
return this;
} | Wraps the code.
Will invalidate the code cache.
@return {VMScript} | wrap ( prefix , suffix ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
compile() {
return this;
} | Noop.
We need to change the code depending whether it is run in VM or NodeVM.
This function cannot decide for which to compile.
@deprecated Will be done on first run
@return {VMScript} | compile ( ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
get code() {
return this._prefix + this._code + this._suffix;
} | For backwards compatibility.
@return {String} The wrapped code | code ( ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
set code(newCode) {
const strNewCode = String(newCode);
if (strNewCode === this._prefix + this._code + this._suffix) return;
this._code = strNewCode;
this._prefix = '';
this._suffix = '';
this._compiledVM = null;
this._compiledNodeVM = null;
} | For backwards compatibility.
Will invalidate the code cache.
@param {String} newCode The new code to run. | code ( newCode ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
_compileVM() {
if (this._compiledVM) return this;
this._compiledVM = new vm.Script(this._prefix + this._code + this._suffix, {
filename: this.filename,
displayErrors: false
});
return this;
} | Will compile the code for VM and cache it
@return {VMScript} | _compileVM ( ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
_compileNodeVM() {
if (this._compiledNodeVM) return this;
this._compiledNodeVM = new vm.Script(
'(function (exports, require, module, __filename, __dirname) { ' + this._prefix + this._code + this._suffix + '\n})',
{
filename: this.filename,
displayErrors: false
}
);
return this;
} | Will compile the code for NodeVM and cache it
@return {VMScript} | _compileNodeVM ( ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
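The NodeVM wrapper above mirrors Node's own CommonJS module wrapper; a standalone sketch using the built-in vm module (independent of the VMScript class, file names are illustrative):

const vm = require('vm');

const userCode = 'module.exports = __filename + " says hi";';
const wrapped = new vm.Script(
  '(function (exports, require, module, __filename, __dirname) { ' + userCode + '\n})',
  { filename: 'sandboxed.js', displayErrors: false }
);

// Evaluate the wrapper in a fresh context, then invoke it with module-shaped arguments.
const moduleShim = { exports: {} };
const fn = wrapped.runInNewContext({});
fn(moduleShim.exports, require, moduleShim, 'sandboxed.js', process.cwd());

console.log(moduleShim.exports); // 'sandboxed.js says hi'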
constructor(message) {
super(message);
this.name = 'VMError';
Error.captureStackTrace(this, this.constructor);
} | Create VMError instance.
@param {String} message Error message.
@return {VMError} | constructor ( message ) | javascript | Postcatlab/postcat | src/node/test-server/request/libs/script-engines/vm2/lib/main.js | https://github.com/Postcatlab/postcat/blob/master/src/node/test-server/request/libs/script-engines/vm2/lib/main.js | Apache-2.0 |
module.exports = async function help(args, output) {
if (!output)
output = process.stderr;
output.write('LUIS Command Line Interface - © 2018 Microsoft Corporation\n\n');
const helpContents = await getHelpContents(args, output);
let width = windowSize ? windowSize.width : 250;
let leftColWidth = 0;
for (let hc of helpContents) {
if (hc.table && hc.table[0].length > 0) {
for (let row in hc.table) {
let len = hc.table[row][0].length;
if (len > leftColWidth) {
leftColWidth = Math.min(len, Math.floor(width / 3));
}
}
}
}
helpContents.forEach(helpContent => {
output.write(chalk.white.bold(helpContent.head + '\n'));
if (helpContent.table && helpContent.table[0].length > 0) {
const rows = helpContent.table[0].length;
let i = rows - 1;
const colWidthsFor2On = ((width * .85) - leftColWidth) / i;
const colWidths = [leftColWidth];
while (i--) {
colWidths.push(~~colWidthsFor2On);
}
const table = new Table({
// don't use lines for table
chars: {
'top': '', 'top-mid': '', 'top-left': '', 'top-right': '',
'bottom': '', 'bottom-mid': '', 'bottom-left': '', 'bottom-right': '',
'left': '', 'left-mid': '', 'right': '', 'right-mid': '',
'mid': '', 'mid-mid': '', 'middle': ''
},
colWidths,
style: { 'padding-left': 1, 'padding-right': 1 },
wordWrap: true
});
table.push(...helpContent.table);
output.write(table.toString());
}
output.write('\n\n');
});
} | Displays help content from the arguments.
@param args The arguments input by the user
@returns {Promise<void>} | help ( args , output ) | javascript | microsoft/botbuilder-tools | packages/LUIS/bin/help.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/LUIS/bin/help.js | MIT |
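The help renderer above consumes an array of { head, table } sections; a minimal sketch of that shape with a plain-text fallback renderer (values are illustrative):

const helpContents = [
  {
    head: 'Available actions:',
    table: [
      ['query', 'Predict intents and entities for an utterance'],
      ['set', 'Update the .luisrc defaults']
    ]
  }
];

helpContents.forEach(section => {
  process.stdout.write(section.head + '\n');
  section.table.forEach(([left, right]) => process.stdout.write(`  ${left}\t${right}\n`));
});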
async function getHelpContents(args, output) {
if ('!' in args) {
return getAllCommands(process.stdout);
}
if (args._.length == 0) {
return getGeneralHelpContents(output);
}
else if (args._.length == 1) {
return getVerbHelp(args._[0], output);
} else if (args._.length >= 2) {
const operation = getOperation(args._[0], args._[1]);
if (operation) {
output.write(`${operation.description}\n\n`);
output.write(`Usage:\n${chalk.cyan.bold(operation.command)}\n\n`);
return getHelpContentsForOperation(operation, output);
} else {
return getVerbHelp(args._[0], output);
}
}
return getGeneralHelpContents(output);
} | Retrieves help content via the luis.json from
the arguments input by the user.
@param args The arguments input by the user
@returns {Promise<*>} | getHelpContents ( args , output ) | javascript | microsoft/botbuilder-tools | packages/LUIS/bin/help.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/LUIS/bin/help.js | MIT
function getVerbHelp(verb, output) {
let targets = [];
let options = {
head: `Available resources for ${chalk.bold(verb)}:`,
table: []
};
// special verbs
let sections = [];
switch (verb) {
case "query":
output.write(chalk.cyan.bold("luis query --query <querytext> [--appId | --endpoint | --nologging | --region | --spellCheck | --staging | --subscriptionKey | --timezoneOffset | --timing | --verbose]\n\n"))
options.table.push([chalk.cyan.bold("--query <query>"), "Query to analyze with LUIS prediction."]);
options.table.push([chalk.cyan.bold("--endpoint <endpointUrl>"), "Endpoint to use for query like https://westus.api.cognitive.microsoft.com, overrides region and cloud."]);
options.table.push([chalk.cyan.bold("--nologging"), "Turn off query logging in LUIS."]);
options.table.push([chalk.cyan.bold("--spellCheck <key>"), "Check spelling using your Bing spelling key."]);
options.table.push([chalk.cyan.bold("--staging"), "Use the staging environtment rather than production."]);
options.table.push([chalk.cyan.bold("--timezoneOffset <minutes>"), "Specify the timezone offset in minutes used for resolving data time."]);
options.table.push([chalk.cyan.bold("--timing [iterations]"), "Perform timings on query, default is 5 iterations."]);
options.table.push([chalk.cyan.bold("--verbose"), "Include scores for all intents."]);
sections.push(options);
sections.push(configSection);
sections.push(globalArgs);
return sections;
case "set":
output.write(chalk.cyan.bold("luis set <appIdOrName> [--appId|--versionId|--authoringKey|--endpoint] <value>\n\n"))
options.table.push([chalk.cyan.bold("<appIdOrName>"), "change the active application by looking it up by name or id"]);
options.table.push([chalk.cyan.bold("--appId <appId>"), "change the active application id "]);
options.table.push([chalk.cyan.bold("--versionId <version>"), "change the active version id "]);
options.table.push([chalk.cyan.bold("--authoringKey <authoringKey>"), "change the active authoringKey "]);
options.table.push([chalk.cyan.bold("--endpoint <endpointUrl>"), "change the endpoint like https://westus.api.cognitive.microsoft.com"]);
sections.push(options);
sections.push(configSection);
sections.push(globalArgs);
return sections;
}
for (let iOperation in operations) {
let operation = operations[iOperation];
if (operation.methodAlias == verb) {
let target = operation.target[0];
if (targets.indexOf(target) < 0) {
targets.push(target);
}
}
}
if (targets.length == 0)
return getGeneralHelpContents(output);
targets.sort();
for (let target of targets) {
options.table.push([chalk.cyan.bold(target), '']);
}
sections.push(options);
sections.push(configSection);
sections.push(globalArgs);
return sections;
} | General verb help contents
@returns {*[]} | getVerbHelp ( verb , output ) | javascript | microsoft/botbuilder-tools | packages/LUIS/bin/help.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/LUIS/bin/help.js | MIT |
async function initializeConfig() {
process.stdout.write(chalk.cyan.bold('\nThis util will walk you through creating a .luisrc file\n\nPress ^C at any time to quit.\n\n'));
//const validRegions = 'westus westus2 eastus eastus2 westcentralus southcentralus westeurope northeurope southeastasia eastasia australiaeast brazilsouth'.split(' ');
const validRegions = 'westus westeurope australiaeast'.split(' ');
const questions = [
'What is your LUIS Authoring key (from luis.ai portal User Settings page)? ',
`What is your region? [${validRegions.join(', ')}] `,
'What is your LUIS App ID? [Default: skip] ',
'What is your LUIS Version ID? [Default: 0.1] ',
];
const prompt = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
const answers = [];
for (let i = 0; i < questions.length; i++) {
const question = questions[i];
const answer = await new Promise((resolve) => {
function doPrompt(promptMessage) {
prompt.question(promptMessage, response => {
if (i === 1 && (!response || !validRegions.includes(response))) {
doPrompt(chalk.red.bold(`${response} is not a valid region`) + '\n' + question);
} else {
resolve(response);
}
});
}
doPrompt(question);
});
if (i == 2 && answer.trim().length == 0)
break;
answers.push(answer.trim());
}
const [authoringKey, region, appId, versionId] = answers;
const config = Object.assign({}, {
appId,
authoringKey,
versionId,
region: region,
});
try {
await new Promise((resolve, reject) => {
const confirmation = `\n\nDoes this look ok?\n${JSON.stringify(config, null, 2)}\n[Yes]/No: `;
prompt.question(confirmation, response => {
/^(y|yes)$/.test((response || 'yes').toLowerCase()) ? resolve(response) : reject();
});
});
} catch (e) {
return false;
}
await fs.writeJson(path.join(process.cwd(), '.luisrc'), config, { spaces: 2 });
return true;
} | Walks the user though the creation of the .luisrc
file and writes it to disk. the App and Version IDs
are optional but if omitted, --appId and --versionId
flags may be required for some commands.
@returns {Promise<*>} | initializeConfig ( ) | javascript | microsoft/botbuilder-tools | packages/LUIS/bin/luis.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/LUIS/bin/luis.js | MIT |
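The resulting .luisrc written by the prompt above looks roughly like this (values are placeholders):

// Example .luisrc contents; keys match the config object assembled above.
const exampleLuisrc = {
  appId: '00000000-0000-0000-0000-000000000000',
  authoringKey: '<your LUIS authoring key>',
  versionId: '0.1',
  region: 'westus'
};

console.log(JSON.stringify(exampleLuisrc, null, 2));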
async function getFileInput(filename) {
if (typeof filename !== 'string') {
return null;
}
// Let any errors fall through to the runProgram() promise
return await fs.readJSON(path.resolve(filename));
} | Retrieves the input file to send as
the body of the request.
@param filename The path to the JSON file to read
@returns {Promise<*>} | getFileInput ( filename ) | javascript | microsoft/botbuilder-tools | packages/LUIS/bin/luis.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/LUIS/bin/luis.js | MIT |
function validateConfig(config) {
// appId and versionId are not validated here since
// not all operations require these to be present.
// Validation of specific params are done in the
// ServiceBase.js
const { authoringKey, region } = config;
const messageTail = `is missing from the configuration.\n\nDid you run ${chalk.cyan.bold('luis init')} yet?`;
assert(typeof authoringKey === 'string', `The authoringKey ${messageTail}`);
assert(typeof region === 'string', `The region ${messageTail}`);
assert(args.region == 'westus' || args.region == 'westeurope' || args.region == 'australiaeast' || args.region == 'virginia', `${args.region} is not a valid authoring region. Valid values are [westus|westeurope|australiaeast|virginia]`);
} | Validates the config object to contain the
fields necessary for endpoint calls.
@param {*} config The config object to validate | validateConfig ( config ) | javascript | microsoft/botbuilder-tools | packages/LUIS/bin/luis.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/LUIS/bin/luis.js | MIT |
async function validateArguments(args, operation) {
let error = new Error();
let body = undefined;
error.name = 'ArgumentError';
if (!operation) {
let verbs = ["add", "clone", "delete", "export", "package", "get", "import", "list", "publish", "query", "set", "suggest", "train", "update"];
if (verbs.indexOf(args._[0]) < 0)
error.message = `'${args._[0]}' is not a valid action`;
else if (args._.length >= 2)
error.message = `'${args._[1]}' is not a valid resource`;
else
error.message = `missing resource\n`;
throw error;
}
switch (operation.target[0]) {
case "userazureaccounts":
case "appazureaccounts":
switch (operation.methodAlias) {
case "list":
if (args.hasOwnProperty("armToken")) {
args.customHeaders["Authorization"] = `Bearer ${args.armToken}`;
operation.entityName = operation.entityName.replace('armToken', '');
operation.entityType = operation.entityType.replace('armToken', '');
}
break;
}
break;
case "appazureaccount":
switch (operation.methodAlias) {
case "add":
case "delete":
if (args.hasOwnProperty("armToken")) {
args.customHeaders["Authorization"] = `Bearer ${args.armToken}`;
operation.entityName = operation.entityName.replace('armToken', '');
operation.entityType = operation.entityType.replace('armToken', '');
}
break;
}
break;
}
const extractEntities = async (entitySpecified) => {
if (entitySpecified) {
const files = args.in.split(',');
const getFileInputPromises = files.map(async (file) => {
return await getFileInput(file);
});
const fileInput = await Promise.all(getFileInputPromises);
body = fileInput.reduce((accumulator, currentValue) => (Object.assign(accumulator, currentValue)), {});
if (body.armToken) {
args.customHeaders["Authorization"] = `Bearer ${body.armToken}`;
}
}
else {
// make up a request body from command line args
switch (operation.target[0]) {
case "version":
switch (operation.methodAlias) {
case "publish":
if (args.versionId) {
body = {
versionId: `${args.versionId}`,
isStaging: args.staging === 'true'
};
}
break;
case "rename":
case "update":
case "clone":
if (args.newVersionId) {
body = {
version: `${args.newVersionId}`
};
}
break;
}
break;
case "settings":
switch (operation.methodAlias) {
case "update":
if (args.hasOwnProperty("public")) {
body = {
isPublic: args.public === 'true'
};
}
break;
}
break;
case "versionsettings":
switch (operation.methodAlias) {
case "update":
if (args.hasOwnProperty("useAllTrainingData")) {
body = [{
name: "UseAllTrainingData",
value: args.useAllTrainingData === 'true' ? 'true' : 'false'
}];
}
break;
}
break;
case "appazureaccount":
if (args.azureSubscriptionId && args.resourceGroup && args.accountName) {
body = {
azureSubscriptionId: `${args.azureSubscriptionId}`,
resourceGroup: `${args.resourceGroup}`,
accountName: `${args.accountName}`
};
}
break;
default:
error.message = `The --in requires an input of type: ${operation.entityType}`;
throw error;
}
}
}
const entitySpecified = typeof args.in === 'string';
const entityRequired = !!operation.entityName;
if (entityRequired) {
await extractEntities(entitySpecified);
}
return body;
// Note that the ServiceBase will validate params that may be required.
}
async function handleQueryCommand(args, config) {
let query = args.q || args.query;
if (!query) {
process.stderr.write(chalk.red.bold(`missing -q\n`));
return help(args);
}
if (!args.appId) {
process.stderr.write(chalk.red.bold(`missing --appId\n`));
return help(args);
}
let subscriptionKey = args.subscriptionKey || config.authoringKey;
if (!subscriptionKey) {
process.stderr.write(chalk.red.bold(`missing --subscriptionKey\n`));
return help(args);
}
let uri;
if (args.endpoint) {
uri = `${args.endpoint}/luis/v2.0/apps/${args.appId}`;
} else {
if (args.region && args.cloud) {
uri = `https://${args.region}.api.cognitive.microsoft.${args.cloud}/luis/v2.0/apps/${args.appId}`;
}
else {
process.stderr.write(chalk.red.bold(`missing --region or --endpoint\n`));
return help(args);
}
}
if (query && subscriptionKey && uri) {
var qargs = {
log: !args.nologging,
staging: args.staging,
"subscription-key": `${subscriptionKey}`,
verbose: args.verbose,
q: `${query}`
};
if (args.spellCheck) {
qargs.spellCheck = true;
qargs["bing-spell-check-subscription-key"] = args.spellCheck;
}
if (args.timezoneOffset) {
qargs.timezoneOffset = args.timezoneOffset;
}
var options = {
uri: uri,
method: "GET",
qs: qargs,
json: true
}
let timings = args.t || args.timing;
if (args.timing) {
let samples = typeof timings === 'boolean' ? 5 : timings;
let total = 0.0;
let sq = 0.0;
let min = Number.MAX_VALUE;
let max = Number.MIN_VALUE;
let values = [];
for (let i = 0; i <= samples; ++i) {
let start = performance.now();
await request(options);
let elapsed = performance.now() - start;
console.log(`${i}: ${elapsed} ms`);
if (i > 0) {
total += elapsed;
sq += elapsed * elapsed;
if (elapsed > max) max = elapsed;
if (elapsed < min) min = elapsed;
values.push(elapsed);
}
}
values.sort((a, b) => a - b);
let variance = (sq - (total * total / samples)) / (samples - 1);
let p95 = values[Math.floor((samples - 1) * 0.95)];
console.log(`Timing after 1st: [${min} ms, ${total / samples} ms, ${max} ms], stddev ${Math.sqrt(variance)} ms, P95 ${p95} ms`)
}
else {
let result = await request(options);
await stdoutAsync(JSON.stringify(result, null, 2) + "\n");
}
return;
}
return help(args);
}
async function handleSetCommand(args, config, client) {
if (args.length == 1 && !(args.a || args.appId || args.applicationId || args.versionId || args.authoringKey || args.region || args.versionId || args.endpoint)) {
        process.stderr.write(chalk.red.bold(`missing .luisrc argument name: [--appId|--applicationId|--versionId|--region|--authoringKey|--endpoint]\n`));
return help(args);
}
config.region = args.region || config.region;
config.authoringKey = args.authoringKey || config.authoringKey;
config.versionId = args.versionId || config.versionId;
config.appId = args.appId || args.applicationId || config.appId;
if (args.endpoint) config.endpoint = args.endpoint;
if (args._.length > 1) {
let targetAppName = args._[1].toLowerCase();
if (targetAppName) {
let results = await client.apps.list(args);
if (results.error) {
throw new Error(results.error);
}
let found = false;
for (let app of results) {
if (app.name.toLowerCase() == targetAppName || app.id.toLowerCase() == targetAppName) {
config.appId = app.id;
config.versionId = app.activeVersion;
found = true;
break;
}
}
if (!found)
throw new Error(`Did not find an application with id or name of '${targetAppName}'`);
}
}
await fs.writeJson(path.join(process.cwd(), '.luisrc'), config, { spaces: 2 });
await stdoutAsync(JSON.stringify(config, null, 4) + "\n");
return true;
}
runProgram()
.then(() => process.exit())
.catch(async (error) => {
process.stderr.write('\n' + chalk.red.bold(error.message + '\n\n'));
await help(args);
process.exitCode = 1;
}); | Provides basic validation of the command arguments.
@param args The parsed command line arguments
@param operation The resolved operation descriptor | validateArguments ( args , operation ) | javascript | microsoft/botbuilder-tools | packages/LUIS/bin/luis.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/LUIS/bin/luis.js | MIT
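For reference, the prediction call that handleQueryCommand issues boils down to a GET against the LUIS v2.0 endpoint; a sketch of the request options it assembles (the app id and key are placeholders):

// Sketch of the options object built by handleQueryCommand; not a drop-in replacement.
const options = {
  uri: 'https://westus.api.cognitive.microsoft.com/luis/v2.0/apps/<appId>',
  method: 'GET',
  qs: {
    q: 'book a flight to Cairo',
    'subscription-key': '<endpoint or authoring key>',
    log: true,          // --nologging flips this to false
    staging: false,     // --staging targets the staging slot
    verbose: false      // --verbose returns scores for all intents
  },
  json: true
};

console.log(options.uri, options.qs.q);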
validateLUISBlob: async function (LUISJSONBlob) {
// look for entity name collisions - list, simple, patternAny, phraselist
// look for list entities labelled
// look for prebuilt entity labels in utterances
let entitiesList = [];
let entityFound = '';
if (LUISJSONBlob.entities.length > 0) {
LUISJSONBlob.entities.forEach(function (entity) {
entitiesList.push(new helperClass.validateLUISBlobEntity(entity.name, ['simple'], entity.roles));
});
}
if (LUISJSONBlob.closedLists.length > 0) {
LUISJSONBlob.closedLists.forEach(function (entity) {
entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
if (entityFound.length === 0) {
entitiesList.push(new helperClass.validateLUISBlobEntity(entity.name, ['list'], entity.roles));
} else {
entityFound[0].type.push('list');
}
});
}
if (LUISJSONBlob.patternAnyEntities.length > 0) {
LUISJSONBlob.patternAnyEntities.forEach(function (entity) {
entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
if (entityFound.length === 0) {
entitiesList.push(new helperClass.validateLUISBlobEntity(entity.name, ['patternAny'], entity.roles));
} else {
entityFound[0].type.push('patternAny');
}
});
}
if (LUISJSONBlob.regex_entities.length > 0) {
LUISJSONBlob.regex_entities.forEach(function (entity) {
entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
if (entityFound.length === 0) {
entitiesList.push(new helperClass.validateLUISBlobEntity(entity.name, [`regEx:/${entity.regexPattern}/`], entity.roles));
} else {
if (entityFound[0].regexPattern !== undefined) {
if (entityFound[0].regexPattern !== entity.regexPattern)
entityFound[0].type.push(`regEx:/${entity.regexPattern}/`);
} else {
entityFound[0].type.push(`regEx:/${entity.regexPattern}/`);
}
}
});
}
// add any composite entities to entities list.
const compositesEnt = (LUISJSONBlob.composites || []);
compositesEnt.forEach(entity => {
entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
if (entityFound.length === 0) {
entitiesList.push(new helperClass.validateLUISBlobEntity(entity.name, ['composite'], entity.roles));
} else {
entityFound[0].type.push('composite');
}
})
// add any pre-built entities to the entities list.
const prebuiltEnt = (LUISJSONBlob.prebuiltEntities || []);
prebuiltEnt.forEach(entity => {
entityFound = helpers.filterMatch(entitiesList, 'name', entity.name);
if (entityFound.length === 0) {
entitiesList.push(new helperClass.validateLUISBlobEntity(entity.name, ['prebuilt'], entity.roles));
} else {
entityFound[0].type.push('prebuilt');
}
})
// for each entityFound, see if there are duplicate definitions
entitiesList.forEach(function (entity) {
if (entity.type.length > 1) {
let errorMsg = `Entity ${entity.name} has duplicate definitions.\r\n\t` + JSON.stringify(entity.type, 2, null);
let error = BuildDiagnostic({ message: errorMsg });
throw (new exception(retCode.errorCode.DUPLICATE_ENTITIES, error.toString()));
}
});
// do we have utterances with phraselist entities?
if (LUISJSONBlob.utterances.length > 0) {
LUISJSONBlob.utterances.forEach(function (utterance) {
if (utterance.entities.length > 0) {
utterance.entities.forEach(function (entity) {
let entityInList = helpers.filterMatch(entitiesList, 'name', entity.entity);
if (entityInList.length > 0) {
if (entityInList[0].type.includes('phraseList')) {
let errorMsg = `Utterance "${utterance.text}" has reference to PhraseList. \r\n\tYou cannot have utterances with phraselist references in them`;
let error = BuildDiagnostic({ message: errorMsg });
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
}
});
}
});
}
// validate composite entities
if (LUISJSONBlob.composites.length > 0) {
LUISJSONBlob.composites.forEach(composite => {
// composites cannot include pattern.any entities as children
if (LUISJSONBlob.patternAnyEntities.length > 0) {
let patternAnyEntityInComposite = (LUISJSONBlob.patternAnyEntities || []).find(patternAnyEntity => {
return composite.children.includes(patternAnyEntity.name);
});
if (patternAnyEntityInComposite !== undefined) {
let errorMsg = `Composite entity "${composite.name}" includes pattern.any entity "${patternAnyEntityInComposite.name}".\r\n\tComposites cannot include pattern.any entity as a child.`;
let error = BuildDiagnostic({ message: errorMsg });
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
}
// composite entity definitions must have valid child entity type definitions.
composite.children.forEach(child => {
// Fix for #1165
// Current implementation does not account for explicit role included in a child
let childEntityName = child;
let childEntityRole = '';
if (child.includes(':')) {
let childSplit = child.split(':').map(item => item.trim());
childEntityName = childSplit[0];
childEntityRole = childSplit[1];
}
let compositeChildEntityFound = (entitiesList || []).find(entity => entity.name == childEntityName);
if (compositeChildEntityFound === undefined) {
let errorMsg = `Composite entity "${composite.name}" includes an undefined child entity "${childEntityName}".\r\n\tAll children of composite entities must be explicitly defined or implicitly defined via an utterance or a pattern`;
let error = BuildDiagnostic({ message: errorMsg });
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
if (childEntityRole != '') {
if (!compositeChildEntityFound.roles.includes(childEntityRole)) {
let errorMsg = `Composite entity "${composite.name}" includes an undefined child entity role "${childEntityName}:${childEntityRole}".\r\n\tAll children of composite entities must be explicitly defined or implicitly defined via an utterance or a pattern`;
let error = BuildDiagnostic({ message: errorMsg });
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
}
})
})
}
return true;
}, | Helper function to validate parsed LUISJsonblob
@param {Object} LUISJSONBlob input LUIS Json blob
@returns {Boolean} True if validation succeeds.
@throws {exception} Throws on errors. exception object includes errCode and text. | validateLUISBlob ( LUISJSONBlob ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
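A minimal sketch of calling the validator with a hand-built blob; the require path is an assumption based on the repo layout shown in the metadata:

const parseFileContents = require('./lib/parseFileContents');   // assumed path, relative to packages/Ludown

const blob = {
  intents: [{ name: 'BookFlight' }],
  entities: [{ name: 'destination', roles: [] }],
  closedLists: [], patternAnyEntities: [], regex_entities: [],
  composites: [], prebuiltEntities: [], utterances: [], patterns: []
};

parseFileContents.validateLUISBlob(blob)
  .then(ok => console.log('valid:', ok))          // true when no duplicate or invalid definitions are found
  .catch(err => console.error(err.text || err));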
parseFile: async function (fileContent, log, locale) {
fileContent = helpers.sanitizeNewLines(fileContent);
let parsedContent = new parserObj();
await parseLuAndQnaWithAntlr(parsedContent, fileContent.toString(), log, locale);
return parsedContent;
}, | Main parser code to parse current file contents into LUIS and QNA sections.
@param {string} fileContent current file content
@param {boolean} log indicates if we need verbose logging.
@param {string} locale LUIS locale code
@returns {parserObj} Object with that contains list of additional files to parse, parsed LUIS object and parsed QnA object
@throws {exception} Throws on errors. exception object includes errCode and text. | parseFile ( fileContent , log , locale ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
collateQnAFiles: async function (parsedQnAList) {
let FinalQnAJSON = new qna();
parsedQnAList.forEach(function (blob) {
blob = blob.qnaJsonStructure;
// does this blob have URLs?
if (blob.urls.length > 0) {
// add this url if this does not already exist in finaljson
blob.urls.forEach(function (qnaUrl) {
if (!FinalQnAJSON.urls.includes(qnaUrl)) {
FinalQnAJSON.urls.push(qnaUrl);
}
});
}
// does this blob have files?
if (blob.files.length > 0) {
// add this url if this does not already exist in finaljson
blob.files.forEach(function (qnaFile) {
if (FinalQnAJSON.files.filter(item => { return item.fileUri == qnaFile.fileUri }).length === 0) {
FinalQnAJSON.files.push(qnaFile);
}
});
}
// does this blob have qnapairs?
if (blob.qnaList.length > 0) {
// walk through each qnaPair and add it if it does not exist
blob.qnaList.forEach(function (newQnAItem) {
if (FinalQnAJSON.qnaList.length == 0) {
FinalQnAJSON.qnaList.push(newQnAItem);
} else {
let qnaExists = false;
let fIndex = 0;
for (fIndex in FinalQnAJSON.qnaList) {
if (deepEqual(FinalQnAJSON.qnaList[fIndex], newQnAItem)) {
qnaExists = true;
break;
}
}
if (!qnaExists) FinalQnAJSON.qnaList.push(newQnAItem);
}
});
}
if (blob.name !== undefined) FinalQnAJSON.name = blob.name;
});
return FinalQnAJSON;
}, | Handle collating all QnA sections across all parsed files into one QnA collection
@param {qna []} parsedQnAList Array of parsed QnA blobs
@returns {qna} Collated qna object
@throws {exception} Throws on errors. exception object includes errCode and text. | collateQnAFiles ( parsedQnAList ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
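A sketch of the merge behaviour, assuming two parsed blobs shaped the way collateQnAFiles expects (each wrapped in a qnaJsonStructure property); the values are illustrative:

const blobA = { qnaJsonStructure: { urls: ['https://aka.ms/faq'], files: [], qnaList: [], name: undefined } };
const blobB = {
  qnaJsonStructure: {
    urls: ['https://aka.ms/faq'],                       // duplicate URL is collapsed
    files: [{ fileUri: 'faq.tsv', fileName: 'faq' }],
    qnaList: [{ id: 0, answer: 'Hi!', source: 'custom editorial', questions: ['hello'], metadata: [] }],
    name: 'my-kb'
  }
};

// collateQnAFiles([blobA, blobB]) would resolve to a single collated qna object, roughly:
// { urls: ['https://aka.ms/faq'], files: [{ fileUri: 'faq.tsv', ... }], qnaList: [ one item ], name: 'my-kb' }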
addItemIfNotPresent: function (collection, type, value) {
let hasValue = false;
for (let i in collection[type]) {
if (collection[type][i].name === value) {
hasValue = true;
break;
}
}
if (!hasValue) {
let itemObj = {};
itemObj.name = value;
if (type == LUISObjNameEnum.PATTERNANYENTITY) {
itemObj.explicitList = [];
}
if (type !== LUISObjNameEnum.INTENT) {
itemObj.roles = [];
}
collection[type].push(itemObj);
}
}, | Helper function to add an item to collection if it does not exist
@param {object} collection contents of the current collection
@param {LUISObjNameEnum} type item type
@param {object} value value of the current item to examine and add
@returns {void} nothing | addItemIfNotPresent ( collection , type , value ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
addItemOrRoleIfNotPresent: function (collection, type, value, roles) {
let existingItem = collection[type].filter(item => item.name == value);
if (existingItem.length !== 0) {
// see if the role exists and if so, merge
mergeRoles(existingItem[0].roles, roles);
} else {
let itemObj = {};
itemObj.name = value;
if (type == LUISObjNameEnum.PATTERNANYENTITY) {
itemObj.explicitList = [];
}
if (type == LUISObjNameEnum.COMPOSITES) {
itemObj.children = [];
}
if (type !== LUISObjNameEnum.INTENT) {
itemObj.roles = roles;
}
collection[type].push(itemObj);
}
} | Helper function to add an item to collection if it does not exist
@param {object} collection contents of the current collection
@param {LUISObjNameEnum} type item type
@param {object} value value of the current item to examine and add
@param {string []} roles possible roles to add to the item
@returns {void} nothing | addItemOrRoleIfNotPresent ( collection , type , value , roles ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
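Usage sketch for the two collection helpers above; the type keys follow the LUISObjNameEnum values used in the code (shown here as plain strings for illustration):

const collection = { intents: [], entities: [] };

// addItemIfNotPresent(collection, 'intents', 'Greeting')
//   -> pushes { name: 'Greeting' } once; a second identical call is a no-op.
// addItemOrRoleIfNotPresent(collection, 'entities', 'destination', ['fromCity'])
//   -> pushes { name: 'destination', roles: ['fromCity'] }
// addItemOrRoleIfNotPresent(collection, 'entities', 'destination', ['toCity'])
//   -> merges roles into ['fromCity', 'toCity'] instead of adding a duplicate entry.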
const parseLuAndQnaWithAntlr = async function (parsedContent, fileContent, log, locale) {
fileContent = helpers.sanitizeNewLines(fileContent);
let luResource = luParser.parse(fileContent);
if (luResource.Errors && luResource.Errors.length > 0) {
if (log) {
process.stdout.write(luResource.Errors.filter(error => error.Severity === DiagnosticSeverity.WARN).map(warn => warn.toString()).join('\n').concat('\n'));
}
var errors = luResource.Errors.filter(error => error.Severity === DiagnosticSeverity.ERROR);
if (errors.length > 0) {
throw (new exception(retCode.errorCode.INVALID_LINE, errors.map(error => error.toString()).join('\n')));
}
}
// parse reference section
await parseAndHandleReference(parsedContent, luResource);
// parse intent section
parseAndHandleIntent(parsedContent, luResource);
// parse entity section
parseAndHandleEntity(parsedContent, luResource, log, locale);
// parse qna section
parseAndHandleQna(parsedContent, luResource);
// parse model info section
parseAndHandleModelInfo(parsedContent, luResource, log);
} | Main parser code to parse current file contents into LUIS and QNA sections.
@param {parserObj} parsedContent Object that contains the list of additional files to parse, the parsed LUIS object and the parsed QnA object
@param {string} fileContent current file content
@param {boolean} log indicates if we need verbose logging.
@param {string} locale LUIS locale code
@throws {exception} Throws on errors. exception object includes errCode and text. | parseLuAndQnaWithAntlr ( parsedContent , fileContent , log , locale ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
const parseAndHandleReference = async function (parsedContent, luResource) {
// handle reference
let luImports = luResource.Imports;
if (luImports && luImports.length > 0) {
for (const luImport of luImports) {
let linkValueText = luImport.Description.replace('[', '').replace(']', '');
let linkValue = luImport.Path.replace('(', '').replace(')', '');
let parseUrl = url.parse(linkValue);
if (parseUrl.host || parseUrl.hostname) {
let options = { method: 'HEAD' };
let response;
try {
response = await fetch(linkValue, options);
} catch (err) {
// throw, invalid URI
let errorMsg = `URI: "${linkValue}" appears to be invalid. Please double check the URI or re-try this parse when you are connected to the internet.`;
let error = BuildDiagnostic({
message: errorMsg,
context: luImport.ParseTree
})
throw (new exception(retCode.errorCode.INVALID_URI, error.toString()));
}
if (!response.ok) {
let errorMsg = `URI: "${linkValue}" appears to be invalid. Please double check the URI or re-try this parse when you are connected to the internet.`;
let error = BuildDiagnostic({
message: errorMsg,
context: luImport.ParseTree
})
throw (new exception(retCode.errorCode.INVALID_URI, error.toString()));
}
let contentType = response.headers.get('content-type');
if (!contentType.includes('text/html')) {
parsedContent.qnaJsonStructure.files.push(new qnaFile(linkValue, linkValueText));
} else {
parsedContent.qnaJsonStructure.urls.push(linkValue);
}
} else {
parsedContent.additionalFilesToParse.push(new fileToParse(linkValue));
}
}
}
} | Reference parser code to parse reference section.
@param {parserObj} parsedContent Object that contains the list of additional files to parse, the parsed LUIS object and the parsed QnA object
@param {LUResource} luResource resources extracted from lu file content
@throws {exception} Throws on errors. exception object includes errCode and text. | parseAndHandleReference ( parsedContent , luResource ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
const parseAndHandleQna = function (parsedContent, luResource) {
// handle QNA
let qnas = luResource.Qnas;
if (qnas && qnas.length > 0) {
for (const qna of qnas) {
let questions = qna.Questions;
let filterPairs = qna.FilterPairs;
let metadata = [];
if (filterPairs && filterPairs.length > 0) {
filterPairs.forEach(pair => metadata.push(new qnaMetaDataObj(pair.key, pair.value)));
}
let answer = qna.Answer;
parsedContent.qnaJsonStructure.qnaList.push(new qnaListObj(0, answer.trim(), 'custom editorial', questions, metadata));
}
}
} | QnA parser code to parse the QnA section.
@param {parserObj} parsedContent Object that contains the list of additional files to parse, the parsed LUIS object and the parsed QnA object
@param {LUResource} luResource resources extracted from lu file content
@throws {exception} Throws on errors. exception object includes errCode and text. | parseAndHandleQna ( parsedContent , luResource ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
const mergeResults = function (blob, finalCollection, type) {
if (blob[type].length > 0) {
blob[type].forEach(function (blobItem) {
if (finalCollection[type].length === 0) {
finalCollection[type].push(blobItem);
return;
}
// add if this item if it does not already exist in final collection
let itemExists = false;
for (let fIndex in finalCollection[type]) {
if (deepEqual(finalCollection[type][fIndex], blobItem)) {
itemExists = true;
break;
} else {
// if item name matches, merge roles if available for everything other than intent
if (type === LUISObjNameEnum.INTENT || type === LUISObjNameEnum.PATTERNS || type === LUISObjNameEnum.UTTERANCE) continue;
if (finalCollection[type][fIndex].name === blobItem.name) {
itemExists = true;
(blobItem.roles || []).forEach(blobRole => {
if (finalCollection[type][fIndex].roles !== undefined) {
if (!finalCollection[type][fIndex].roles.includes(blobRole)) {
finalCollection[type][fIndex].roles.push(blobRole);
}
}
});
}
}
}
if (!itemExists) {
finalCollection[type].push(blobItem);
}
});
}
}; | Helper function to merge item if it does not already exist
@param {object} blob Contents of all parsed file blobs
@param {object} finalCollection Reference to the final collection of items
@param {LUISObjNameEnum} type Enum type of possible LUIS object types
@returns {void} Nothing | mergeResults ( blob , finalCollection , type ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
const mergeResults_closedlists = function (blob, finalCollection, type) {
if (blob[type].length > 0) {
blob[type].forEach(function (blobItem) {
let listInFinal = helpers.filterMatch(finalCollection[type], 'name', blobItem.name);
if (listInFinal.length === 0) {
finalCollection[type].push(blobItem);
} else {
blobItem.subLists.forEach(function (blobSLItem) {
// see if there is a sublist match in listInFinal
let slInFinal = helpers.filterMatch(listInFinal[0].subLists, 'canonicalForm', blobSLItem.canonicalForm);
if (slInFinal.length === 0) {
listInFinal[0].subLists.push(blobSLItem);
} else {
// there is a canonical form match. See if the values all exist
blobSLItem.list.forEach(function (listItem) {
if (!slInFinal[0].list.includes(listItem)) slInFinal[0].list.push(listItem);
})
}
});
// merge roles if they are different
(blobItem.roles || []).forEach(blobRole => {
if (!listInFinal[0].roles.includes(blobRole)) {
listInFinal[0].roles.push(blobRole);
}
})
}
});
}
}; | Helper function to merge closed list item if it does not already exist
@param {object} blob Contents of all parsed file blobs
@param {object} finalCollection Reference to the final collection of items
@param {LUISObjNameEnum} type Enum type of possible LUIS object types
@returns {void} nothing | mergeResults_closedlists ( blob , finalCollection , type ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
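The same idea for list (closed list) entities, sketched on plain objects with made-up values: sublists are matched by canonicalForm, synonym values are unioned, and roles are merged. This mirrors the behavior above but is not the library code itself.
const finalLists = [{ name: 'color', roles: [], subLists: [{ canonicalForm: 'red', list: ['crimson'] }] }];
const incoming = { name: 'color', roles: ['favoriteColor'], subLists: [{ canonicalForm: 'red', list: ['crimson', 'scarlet'] }, { canonicalForm: 'blue', list: ['navy'] }] };

const target = finalLists.find(item => item.name === incoming.name);
incoming.subLists.forEach(subList => {
    const existing = target.subLists.find(sl => sl.canonicalForm === subList.canonicalForm);
    if (existing === undefined) {
        target.subLists.push(subList);                      // new canonical form
    } else {
        subList.list.forEach(synonym => {                   // union the synonym values
            if (!existing.list.includes(synonym)) existing.list.push(synonym);
        });
    }
});
incoming.roles.forEach(role => { if (!target.roles.includes(role)) target.roles.push(role); });

console.log(JSON.stringify(target));
// {"name":"color","roles":["favoriteColor"],"subLists":[{"canonicalForm":"red","list":["crimson","scarlet"]},{"canonicalForm":"blue","list":["navy"]}]}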
const flattenLists = function (list, retObj, parentIdx) {
let retValue = []
if (list.entity !== undefined) list.entity = list.entity.trim();
if (list.role !== undefined) list.role = list.role.trim();
if (list.startPos !== undefined) list.startPos = parentIdx;
let offset = 0;
list.value.forEach((item, idx) => {
if (item instanceof helperClass.parserEntity) {
let valuesToInsert = flattenLists(item, retObj, offset + parentIdx);
if (valuesToInsert.length > 0) {
retValue = retValue.concat(valuesToInsert);
offset += valuesToInsert.length;
}
} else {
retValue.push(item);
if (item === ' ') {
if (idx !== 0 && idx !== (list.value.length - 1)) {
offset++;
}
} else {
offset++;
}
}
});
if (list.value.length === 0) {
list.type = LUISObjNameEnum.PATTERNANYENTITY;
if (list.role != '') {
retValue = `{${list.entity}:${list.role}}`.split('');
} else {
retValue = `{${list.entity}}`.split('');
}
} else {
list.type = LUISObjNameEnum.ENTITIES;
}
retValue = retValue.join('').trim();
if (list.endPos !== undefined) list.endPos = parentIdx + retValue.length - 1;
retObj.entitiesFound.push(new helperClass.parserEntity(undefined, list.startPos, list.entity, retValue, list.endPos, list.type, list.role));
return retValue.split('');
}; | Helper function to recursively pull entities from parsed utterance text
@param {parserEntity} list
@param {Object} retObj {entitiesFound, utteranceWithoutEntityLabel}
@param {number} parentIdx index where this list occurs in the parent
@returns {string[]} resolved values to add to the parent list
@throws {exception} Throws on errors. exception object includes errCode and text. | flattenLists ( list , retObj , parentIdx ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
const mergeRoles = function (srcEntityRoles, tgtEntityRoles) {
const rolesMap = srcEntityRoles.reduce((map, role) => (map[role] = true, map), {});
tgtEntityRoles.forEach(role => {
if (!rolesMap[role]) {
srcEntityRoles.push(role);
}
});
} | Helper function to merge roles
@param {string []} srcEntityRoles contents of the current collection
@param {string []} tgtEntityRoles target entity roles collection to merge
@returns {void} nothing | mergeRoles ( srcEntityRoles , tgtEntityRoles ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
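In isolation the role merge behaves like a set union on the source array; a quick check (the two-line body is restated here so the snippet runs on its own, and the role names are made up):
const mergeRoles = function (srcEntityRoles, tgtEntityRoles) {
    const rolesMap = srcEntityRoles.reduce((map, role) => (map[role] = true, map), {});
    tgtEntityRoles.forEach(role => { if (!rolesMap[role]) srcEntityRoles.push(role); });
};

const src = ['fromCity', 'toCity'];
mergeRoles(src, ['toCity', 'layoverCity']);
console.log(src); // [ 'fromCity', 'toCity', 'layoverCity' ]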
const itemExists = function (collection, entityName, entityRoles) {
let matchInClosedLists = helpers.filterMatch(collection, 'name', entityName);
if (matchInClosedLists.length !== 0) {
// merge roles if there are any roles in the pattern entity
if (entityRoles.length !== 0) {
mergeRoles(matchInClosedLists[0].roles, entityRoles);
}
return true;
}
return false;
} | Helper function that returns true if the item exists. Merges roles before returning
@param {Object} collection contents of the current collection
@param {string} entityName name of entity to look for in the current collection
@param {string []} entityRoles target entity roles collection to merge
@returns {boolean} true if the item exists in the collection, false otherwise | itemExists ( collection , entityName , entityRoles ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
const VerifyAndUpdateSimpleEntityCollection = function (parsedContent, entityName, entityType) {
let entityRoles = [];
// Find this entity if it exists in the simple entity collection
let simpleEntityExists = (parsedContent.LUISJsonStructure.entities || []).find(item => item.name == entityName);
if (simpleEntityExists !== undefined) {
// take and add any roles into the roles list
(simpleEntityExists.roles || []).forEach(role => !entityRoles.includes(role) ? entityRoles.push(role) : undefined);
// remove this simple entity definition
// Fix for #1137.
// Current behavior does not allow for simple and phrase list entities to have the same name.
if (entityType != 'Phrase List') {
for (var idx = 0; idx < parsedContent.LUISJsonStructure.entities.length; idx++) {
if (parsedContent.LUISJsonStructure.entities[idx].name === simpleEntityExists.name) {
parsedContent.LUISJsonStructure.entities.splice(idx, 1);
}
}
}
}
// Find if this entity is referred in a labelled utterance
let entityExistsInUtteranceLabel = (parsedContent.LUISJsonStructure.utterances || []).find(item => {
let entityMatch = (item.entities || []).find(entity => entity.entity == entityName)
if (entityMatch !== undefined) return true;
return false;
});
if (entityExistsInUtteranceLabel !== undefined) {
let entityMatch = entityExistsInUtteranceLabel.entities.filter(item => item.entity == entityName);
entityMatch.forEach(entity => {
if (entity.role !== undefined) {
if (!entityRoles.includes(entity.role)) {
entityRoles.push(entity.role);
}
} else if (entityType !== 'Phrase List') { // Fix for # 1151. Phrase lists can have same name as other entities.
let errorMsg = `'${entityType}' entity: "${entityName}" is added as a labelled entity in utterance "${entityExistsInUtteranceLabel.text}". ${entityType} cannot be added with explicit labelled values in utterances.`
let error = BuildDiagnostic({
message: errorMsg
});
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
});
}
return entityRoles;
}
/**
* Helper function to add an item to collection if it does not exist
* @param {object} collection contents of the current collection
* @param {LUISObjNameEnum} type item type
* @param {object} value value of the current item to examine and add
* @returns {void} nothing
*/
const addItemIfNotPresent = function (collection, type, value) {
let hasValue = false;
for (let i in collection[type]) {
if (collection[type][i].name === value) {
hasValue = true;
break;
}
}
if (!hasValue) {
let itemObj = {};
itemObj.name = value;
if (type == LUISObjNameEnum.PATTERNANYENTITY) {
itemObj.explicitList = [];
}
if (type !== LUISObjNameEnum.INTENT) {
itemObj.roles = [];
}
collection[type].push(itemObj);
}
};
/**
* Helper function to add an item to collection if it does not exist
* @param {object} collection contents of the current collection
* @param {LUISObjNameEnum} type item type
* @param {object} value value of the current item to examine and add
* @param {string []} roles possible roles to add to the item
* @returns {void} nothing
*/
const addItemOrRoleIfNotPresent = function (collection, type, value, roles) {
let existingItem = collection[type].filter(item => item.name == value);
if (existingItem.length !== 0) {
// see if the role exists and if so, merge
mergeRoles(existingItem[0].roles, roles);
} else {
let itemObj = {};
itemObj.name = value;
if (type == LUISObjNameEnum.PATTERNANYENTITY) {
itemObj.explicitList = [];
}
if (type !== LUISObjNameEnum.INTENT) {
itemObj.roles = roles;
}
collection[type].push(itemObj);
}
}
module.exports = parseFileContentsModule; | Helper function to verify that the requested entity does not already exist
@param {parserObj} parsedContent parserObj containing current parsed content
@param {String} entityName
@param {String} entityType
@returns {String[]} Possible roles found to import into the explicitly defined entity type.
@throws {exception} Throws on errors. exception object includes errCode and text. | VerifyAndUpdateSimpleEntityCollection ( parsedContent , entityName , entityType ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parseFileContents.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parseFileContents.js | MIT |
constructor(parseTree) {
this.ParseTree = parseTree;
this.Name = this.ExtractName(parseTree);
const result = this.ExtractUtteranceAndEntitiesMap(parseTree);
this.UtteranceAndEntitiesMap = result.utteranceAndEntitiesMap;
this.Errors = result.errors;
} | @param {IntentDefinitionContext} parseTree | constructor ( parseTree ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/luIntent.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/luIntent.js | MIT |
constructor(parseTree) {
this.ParseTree = parseTree;
this.Description = this.ExtractDescription(parseTree);
let result = this.ExtractPath(parseTree);
this.Path = result.importPath;
this.Errors = result.errors;
} | @param {ImportDefinitionContext} parseTree | constructor ( parseTree ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/luImport.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/luImport.js | MIT |
translateContent: async function(program) {
let filesToParse = [];
let folderStat = '';
if(program.in) {
filesToParse.push(program.in);
}
// do we have a folder passed in?
if(program.lu_folder) {
try
{
folderStat = fs.statSync(program.lu_folder);
} catch (err) {
throw(new exception(retCode.errorCode.OUTPUT_FOLDER_INVALID, 'Sorry, ' + program.lu_folder + ' is not a folder or does not exist'));
}
if(!folderStat.isDirectory()) {
throw(new exception(retCode.errorCode.OUTPUT_FOLDER_INVALID, 'Sorry, ' + program.lu_folder + ' is not a folder or does not exist'));
}
if(program.subfolder) {
filesToParse = helpers.findLUFiles(program.lu_folder, true);
} else {
filesToParse = helpers.findLUFiles(program.lu_folder, false);
}
if(filesToParse.length === 0) {
throw(new exception(retCode.errorCode.NO_LU_FILES_FOUND, 'Sorry, no .lu files found in the specified folder.'));
}
}
// is there an output folder?
let outFolder = process.cwd();
if(program.out_folder) {
if(path.isAbsolute(program.out_folder)) {
outFolder = program.out_folder;
} else {
outFolder = path.resolve('', program.out_folder);
}
if(!fs.existsSync(outFolder)) {
throw(new exception(retCode.errorCode.OUTPUT_FOLDER_INVALID, 'Output folder ' + outFolder + ' does not exist'));
}
}
while(filesToParse.length > 0) {
let file = filesToParse[0];
try {
await parseFile(file, outFolder, program.translate_key, program.to_lang, program.src_lang, program.translate_comments, program.translate_link_text, program.verbose, program.batch_translate, program.lu_file);
} catch (err) {
throw(err);
}
filesToParse.splice(0,1);
}
} | Helper function to parse, translate and write out localized lu files
@param {object} program parsed program object from commander
@returns {void} nothing
@throws {exception} Throws on errors. exception object includes errCode and text. | translateContent ( program ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/translate.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/translate.js | MIT |
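A hypothetical invocation sketch for the flow above. The property names mirror exactly what translateContent and parseFile read off the commander options object; the require path, key, and folder values are assumptions and not part of the source.
const translate = require('./lib/translate');   // path assumed

translate.translateContent({
    in: './bot/root.lu',                  // or lu_folder (+ subfolder: true) for batch mode
    translate_key: '<text-translator-key>',
    to_lang: 'de, fr',                    // comma- or space-separated target language codes
    src_lang: 'en',
    out_folder: './translated',           // must already exist
    translate_comments: true,
    translate_link_text: false,
    verbose: true
}).catch(err => console.error(err.text || err));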
async function parseFile(file, outFolder, translate_key, to_lang, src_lang, translate_comments, translate_link_text, log, batch_translate, lu_file) {
let fileName = lu_file ? lu_file : path.basename(file);
if(!fs.existsSync(path.resolve(file))) {
throw(new exception(retCode.errorCode.FILE_OPEN_ERROR, 'Sorry unable to open [' + file + ']'));
}
let fileContent = txtfile.readSync(file);
if (!fileContent) {
throw(new exception(retCode.errorCode.FILE_OPEN_ERROR, 'Sorry, error reading file:' + file));
}
if(log) process.stdout.write(chalk.default.whiteBright('Parsing file: ' + file + '\n'));
let parsedLocContent = '';
// Support multi-language specification for targets.
// Accepted formats are space or comma separated list of target language codes.
// Tokenize to_lang
let toLang = to_lang.split(/[, ]/g);
for (let idx in toLang) {
let tgt_lang = toLang[idx].trim();
if (tgt_lang === '') continue;
try {
parsedLocContent = await translateHelpers.parseAndTranslate(fileContent, translate_key, tgt_lang, src_lang, translate_comments, translate_link_text, log, batch_translate)
} catch (err) {
throw(err);
}
if (!parsedLocContent) {
throw(new exception(retCode.errorCode.INVALID_INPUT_FILE, 'Sorry, file: ' + file + ' had invalid content'));
} else {
// write out file
let loutFolder = path.join(outFolder, tgt_lang);
try
{
fs.mkdirSync(loutFolder);
} catch(err) {
if(err.code != 'EEXIST') {
throw(new exception(retCode.errorCode.UNABLE_TO_WRITE_FILE, 'Unable to create folder - ' + err));
}
}
let outFileName = path.join(loutFolder, fileName);
try {
fs.writeFileSync(outFileName, parsedLocContent, 'utf-8');
} catch (err) {
throw(new exception(retCode.errorCode.UNABLE_TO_WRITE_FILE, 'Unable to write LU file - ' + outFileName));
}
if(log) process.stdout.write(chalk.default.italic('Successfully wrote to ' + outFileName + '\n\n'));
}
}
}
module.exports = translateModule; | Helper function to parse, translate and write out localized lu files
@param {string} file file name
@param {string} outFolder output folder path
@param {string} translate_key translate text API key
@param {string} to_lang language code to translate content to
@param {string} src_lang language code for source content
@param {boolean} translate_comments translate comments in .lu files if this is set to true
@param {boolean} translate_link_text translate URL or LU reference link text in .lu files if this is set to true
@param {boolean} log indicates if this function should write verbose messages to process.stdout
@param {number} batch_translate indicates number of input lines to batch up before calling translation API
@param {string} lu_file output file name requested
@returns {void} nothing
@throws {exception} Throws on errors. exception object includes errCode and text. | parseFile ( file , outFolder , translate_key , to_lang , src_lang , translate_comments , translate_link_text , log , batch_translate , lu_file ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/translate.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/translate.js | MIT |
const get_guid = function () {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
} | Helper function to create a random guid
@returns {string} GUID | get_guid ( ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/translate-helpers.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/translate-helpers.js | MIT |
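The generator above produces v4-style GUID strings; a quick shape check (the body is repeated so the snippet is self-contained):
const get_guid = function () {
    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
        var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
        return v.toString(16);
    });
};

// Third group always starts with 4, fourth group with 8, 9, a or b.
console.log(/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/.test(get_guid())); // true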
constructor(intents, entities, imports, qnas, modelInfos, errors) {
this.Intents = intents;
this.Entities = entities;
this.Imports = imports;
this.Qnas = qnas;
this.Errors = errors;
this.ModelInfos = modelInfos;
} | @param {LUIntent[]} intents
@param {any[]} entities
@param {any[]} imports
@param {any[]} qnas
@param {any[]} modelInfos
@param {any[]} errors | constructor ( intents , entities , imports , qnas , modelInfos , errors ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/luResource.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/luResource.js | MIT |
handleFile: async function(program, cmd) {
let filesToParse;
try {
filesToParse = await getFilesToParse(program);
} catch (err) {
throw (err);
}
let rootFile = filesToParse[0];
let allParsedContent = '';
try {
allParsedContent = await parseAllFiles(filesToParse, program.verbose, program.luis_culture);
} catch (err) {
throw (err);
}
// resolve utterance deep references
await resolveReferencesInUtterances(allParsedContent);
let finalLUISJSON, finalQnAJSON, finalQnAAlterations;
try {
// pass only files that need to be collated.
finalLUISJSON = await parseFileContents.collateLUISFiles(allParsedContent.LUISContent.filter(item => item.includeInCollate));
if(haveLUISContent(finalLUISJSON)) await parseFileContents.validateLUISBlob(finalLUISJSON);
finalQnAJSON = await parseFileContents.collateQnAFiles(allParsedContent.QnAContent.filter(item => item.includeInCollate));
finalQnAAlterations = await parseFileContents.collateQnAAlterations(allParsedContent.QnAAlterations.filter(item => item.includeInCollate));
} catch (err) {
throw (err);
}
try {
writeOutFiles(program,finalLUISJSON,finalQnAJSON, finalQnAAlterations, rootFile, cmd);
} catch (err) {
throw(err);
}
} | Handle parsing the root file that was passed in command line args
@param {object} program Content flushed out by commander
@param {cmdEnum} cmd Parse to either LUIS or QnA
@returns {void} Nothing
@throws {exception} Throws on errors. exception object includes errCode and text. | handleFile ( program , cmd ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
const getOutputFolder = function(program) {
let outFolder = process.cwd();
if(program.out_folder) {
if(path.isAbsolute(program.out_folder)) {
outFolder = program.out_folder;
} else {
outFolder = path.resolve('', program.out_folder);
}
if(!fs.existsSync(outFolder)) {
throw(new exception(retCode.errorCode.NO_LU_FILES_FOUND, 'Output folder ' + outFolder + ' does not exist'));
}
}
return outFolder;
} | Helper function to get output folder
@param {object} program Parsed program object from commander
@returns {string} Output folder
@throws {exception} Throws on errors. exception object includes errCode and text. | getOutputFolder ( program ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
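The relative-versus-absolute handling above, shown in isolation (folder names are made up; the existence check is omitted):
const path = require('path');
const resolveOutFolder = out => path.isAbsolute(out) ? out : path.resolve('', out);

console.log(resolveOutFolder('generated'));      // <current working directory>/generated
console.log(resolveOutFolder('/tmp/generated')); // /tmp/generated, used as-is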
const getFilesToParse = async function(program) {
let filesToParse = [];
if(program.in) {
filesToParse.push(program.in);
}
if(program.lu_folder) {
// is this a folder?
try
{
var folderStat = fs.statSync(program.lu_folder);
} catch (err) {
throw(new exception(retCode.errorCode.OUTPUT_FOLDER_INVALID, 'Sorry, ' + program.lu_folder + ' is not a folder or does not exist'));
}
if(!folderStat.isDirectory()) {
throw(new exception(retCode.errorCode.OUTPUT_FOLDER_INVALID, 'Sorry, ' + program.lu_folder + ' is not a folder or does not exist'));
}
if(program.subfolder) {
filesToParse = helpers.findLUFiles(program.lu_folder, true);
} else {
filesToParse = helpers.findLUFiles(program.lu_folder, false);
}
if(filesToParse.length === 0) {
throw(new exception(retCode.errorCode.NO_LU_FILES_FOUND, 'Sorry, no .lu files found in the specified folder.'));
}
}
return filesToParse;
} | Helper function to get list of lu files to parse
@param {object} program Parsed program object from commander
@returns {Array} Array of .lu files found to parse
@throws {exception} Throws on errors. exception object includes errCode and text. | getFilesToParse ( program ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
const parseAllFiles = async function(filesToParse, log, luis_culture) {
let parsedContent = '';
let allParsedLUISContent = [];
let allParsedQnAContent = [];
let allParsedAlterationsContent = [];
filesToParse = filesToParseClass.stringArrayToFileToParseList(filesToParse);
let parsedFiles = [];
while (filesToParse.length > 0) {
let file = filesToParse[0].filePath;
// skip this file if we have parsed it already
if (parsedFiles.includes(file)) {
filesToParse.splice(0,1)
continue;
}
if(!fs.existsSync(path.resolve(file))) {
let error = BuildDiagnostic({
message: `Sorry unable to open [${file}]`
});
throw(new exception(retCode.errorCode.FILE_OPEN_ERROR, error.toString()));
}
let fileContent = txtfile.readSync(file);
if (!fileContent) {
let error = BuildDiagnostic({
message: `Sorry, error reading file: ${file}`
});
throw(new exception(retCode.errorCode.FILE_OPEN_ERROR, error.toString()));
}
if(log) process.stdout.write(chalk.default.whiteBright('Parsing file: ' + file + '\n'));
try {
parsedContent = await parseFileContents.parseFile(fileContent, log, luis_culture);
} catch (err) {
throw(err);
}
if (!parsedContent) {
let error = BuildDiagnostic({
message: `Sorry, file ${file} had invalid content`
});
throw(new exception(retCode.errorCode.INVALID_INPUT_FILE, error.toString()));
}
parsedFiles.push(file);
try {
if (haveLUISContent(parsedContent.LUISJsonStructure) && await parseFileContents.validateLUISBlob(parsedContent.LUISJsonStructure)) allParsedLUISContent.push(parserObject.create(parsedContent.LUISJsonStructure, undefined, undefined, file, filesToParse[0].includeInCollate));
} catch (err) {
throw (err);
}
allParsedQnAContent.push(parserObject.create(undefined, parsedContent.qnaJsonStructure, undefined, file, filesToParse[0].includeInCollate));
allParsedAlterationsContent.push(parserObject.create(undefined, undefined, parsedContent.qnaAlterations, file, filesToParse[0].includeInCollate));
// remove this file from the list
let parentFile = filesToParse.splice(0,1);
let parentFilePath = path.parse(path.resolve(parentFile[0].filePath)).dir;
// add additional files to parse to the list
if(parsedContent.additionalFilesToParse.length > 0) {
parsedContent.additionalFilesToParse.forEach(function(file) {
// Support wild cards at the end of a relative .LU file path.
// './bar/*' should look for all .lu files under the specified folder.
// './bar/**' should recursively look for .lu files under sub-folders as well.
if(file.filePath.endsWith('*')) {
const isRecursive = file.filePath.endsWith('**');
const rootFolder = file.filePath.replace(/\*/g, '');
let rootPath = rootFolder;
if(!path.isAbsolute(rootFolder)) {
rootPath = path.resolve(parentFilePath, rootFolder);
}
// Get LU files in this location
const luFilesToAdd = helpers.findLUFiles(rootPath, isRecursive);
if(luFilesToAdd.length !== 0) {
// add these to filesToParse
luFilesToAdd.forEach(addFile => filesToParse.push(new filesToParseClass(addFile, file.includeInCollate)));
}
} else {
if(!path.isAbsolute(file.filePath)) file.filePath = path.resolve(parentFilePath, file.filePath);
// avoid parsing files that have been parsed already
if(parsedFiles.includes(file.filePath)) {
// find matching parsed files and ensure includeInCollate is updated if needed.
updateParsedFiles(allParsedLUISContent, allParsedQnAContent, allParsedAlterationsContent, file);
} else {
filesToParse.push(new filesToParseClass(file.filePath, file.includeInCollate));
}
}
});
}
}
return {
LUISContent: allParsedLUISContent,
QnAContent: allParsedQnAContent,
QnAAlterations: allParsedAlterationsContent
};
} | Helper function to loop through and parse all files
@param {Array} filesToParse List of input .lu files to parse
@param {boolean} log If true, write verbose log messages to stdout
@param {string} luis_culture LUIS language code
@returns {object} Object containing arrays of all parsed LUIS and QnA content found in the files
@throws {exception} Throws on errors. exception object includes errCode and text. | parseAllFiles ( filesToParse , log , luis_culture ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
const updateParsedFiles = function(allParsedLUISContent, allParsedQnAContent, allParsedAlterationsContent, file) {
// find the instance and ensure includeInCollate property is set correctly
let matchInLUIS = allParsedLUISContent.find(item => item.srcFile == file.filePath);
if(matchInLUIS && (matchInLUIS.includeInCollate === false && file.includeInCollate === true)) matchInLUIS.includeInCollate = true;
let matchInQnA = allParsedQnAContent.find(item => item.srcFile == file.filePath);
if(matchInQnA && (matchInQnA.includeInCollate === false && file.includeInCollate === true)) matchInQnA.includeInCollate = true;
let matchInAlterations = allParsedAlterationsContent.find(item => item.srcFile == file.filePath);
if(matchInAlterations && (matchInAlterations.includeInCollate === false && file.includeInCollate === true)) matchInAlterations.includeInCollate = true;
} | Helper function to update parsed files to include in collate
@param {Object} allParsedLUISContent
@param {Object} allParsedQnAContent
@param {Object} allParsedAlterationsContent
@param {Object} file | updateParsedFiles ( allParsedLUISContent , allParsedQnAContent , allParsedAlterationsContent , file ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
const haveLUISContent = function(blob) {
if(!blob) return false;
return ((blob[LUISObjNameEnum.INTENT].length > 0) ||
(blob[LUISObjNameEnum.ENTITIES].length > 0) ||
(blob[LUISObjNameEnum.CLOSEDLISTS].length > 0) ||
(blob[LUISObjNameEnum.PATTERNANYENTITY].length > 0) ||
(blob.patterns.length > 0) ||
(blob[LUISObjNameEnum.UTTERANCE].length > 0) ||
(blob.prebuiltEntities.length > 0) ||
(blob[LUISObjNameEnum.REGEX].length > 0) ||
(blob.model_features.length > 0) ||
(blob.composites.length > 0));
}; | Helper function to see if we have any luis content in the blob
@param {object} blob Contents of parsed luis blob
@returns {boolean} true if there is any luis content in the blob | haveLUISContent ( blob ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
const tagListEntities = function(blob) {
if(!blob || typeof blob != "object") return null;
if(!blob.closedLists || blob.closedLists.length == 0) return blob;
blob.closedLists.forEach(closedListItem => {
(closedListItem.subLists || []).forEach(subListItem => {
(subListItem.list || []).forEach(synonym => {
// go through all utterances and add this list entity if this synonym matches
let matchedUtterances = (blob.utterances || []).filter(function(item) {
return item.text.includes(synonym);
});
(matchedUtterances || []).forEach(utterance => {
// there could be more than one match. so just split the utterance
let splitBySynonym = utterance.text.split(synonym);
let endPos = 0;
let startPos = 0;
for(var idx = 0; idx < splitBySynonym.length - 1; idx++) {
startPos = endPos + splitBySynonym[idx].length;
endPos += splitBySynonym[idx].length + synonym.length;
utterance.entities.push({'entity': closedListItem.name, 'startPos' : startPos, 'endPos' : endPos});
}
})
})
})
});
return blob;
} | Helper function that identifies and tags utterances with list entities.
Used before writing out batch test output
@param {Object} blob Contents of parsed luis blob
@returns {Object} Updated blob that includes entity labels for lists | tagListEntities ( blob ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
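The position arithmetic used above, in isolation: every occurrence of a list synonym in an utterance gets labelled with character offsets. The values are illustrative; note that endPos, as computed by this loop, points one position past the last character of the match.
const utterance = { text: 'book a flight to seattle from seattle', entities: [] };
const synonym = 'seattle';

const parts = utterance.text.split(synonym);
let startPos = 0, endPos = 0;
for (let idx = 0; idx < parts.length - 1; idx++) {
    startPos = endPos + parts[idx].length;
    endPos += parts[idx].length + synonym.length;
    utterance.entities.push({ entity: 'city', startPos: startPos, endPos: endPos });
}

console.log(utterance.entities);
// [ { entity: 'city', startPos: 17, endPos: 24 }, { entity: 'city', startPos: 30, endPos: 37 } ]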
const resolveReferencesInUtterances = async function(allParsedContent) {
// find LUIS utterances that have references
(allParsedContent.LUISContent || []).forEach(luisModel => {
if (!luisModel.includeInCollate) return;
let newUtterancesToAdd = [];
let newPatternsToAdd = [];
let spliceList = [];
(luisModel.LUISJsonStructure.utterances || []).forEach((utterance,idx) => {
// Deep references must have [link name](link-value) notation
if (utterance.text.indexOf('[') !== 0) return;
// does this utterance have a deep link uri?
let linkExp = (utterance.text || '').trim().match(new RegExp(/\(.*?\)/g));
if (linkExp && linkExp.length !== 0) {
// we have stuff to parse and resolve
let parsedUtterance = helpers.parseLinkURI(utterance.text);
if (!path.isAbsolute(parsedUtterance.luFile)) parsedUtterance.luFile = path.resolve(path.dirname(luisModel.srcFile), parsedUtterance.luFile);
// see if we need to pull LUIS or QnA utterances
if (parsedUtterance.ref.endsWith('?')) {
if( parsedUtterance.luFile.endsWith('*')) {
let parsedQnABlobs = (allParsedContent.QnAContent || []).filter(item => item.srcFile.includes(parsedUtterance.luFile.replace(/\*/g, '')));
if(parsedQnABlobs === undefined) {
let error = BuildDiagnostic({
message: `Unable to parse ${utterance.text} in file: ${luisModel.srcFile}`
});
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
parsedQnABlobs.forEach(blob => blob.qnaJsonStructure.qnaList.forEach(item => item.questions.forEach(question => newUtterancesToAdd.push(new hClasses.uttereances(question, utterance.intent)))));
} else {
// look for QnA
let parsedQnABlob = (allParsedContent.QnAContent || []).find(item => item.srcFile == parsedUtterance.luFile);
if(parsedQnABlob === undefined) {
let error = BuildDiagnostic({
message: `Unable to parse ${utterance.text} in file: ${luisModel.srcFile}`
});
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
// get questions list from .lu file and update list
parsedQnABlob.qnaJsonStructure.qnaList.forEach(item => item.questions.forEach(question => newUtterancesToAdd.push(new hClasses.uttereances(question, utterance.intent))));
}
spliceList.push(idx);
} else {
// find the parsed file
let parsedLUISBlob = (allParsedContent.LUISContent || []).find(item => item.srcFile == parsedUtterance.luFile);
if(parsedLUISBlob === undefined) {
let error = BuildDiagnostic({
message: `Unable to parse ${utterance.text} in file: ${luisModel.srcFile}`
});
throw (new exception(retCode.errorCode.INVALID_INPUT, error.toString()));
}
let utterances = [], patterns = [];
if (parsedUtterance.ref.toLowerCase().includes('utterancesandpatterns')) {
// get all utterances and add them
utterances = parsedLUISBlob.LUISJsonStructure.utterances;
// Find all patterns and add them
(parsedLUISBlob.LUISJsonStructure.patterns || []).forEach(item => {
let newUtterance = new hClasses.uttereances(item.pattern, item.intent);
if (utterances.find(match => deepEqual(newUtterance, match)) === undefined) utterances.push(newUtterance)
});
} else if (parsedUtterance.ref.toLowerCase().includes('utterances')) {
// get all utterances and add them
utterances = parsedLUISBlob.LUISJsonStructure.utterances;
} else if (parsedUtterance.ref.toLowerCase().includes('patterns')) {
// Find all patterns and add them
(parsedLUISBlob.LUISJsonStructure.patterns || []).forEach(item => utterances.push(new hClasses.uttereances(item.pattern, item.intent)));
} else {
// get utterance list from reference intent and update list
let referenceIntent = parsedUtterance.ref.replace(/-/g, ' ').trim();
utterances = parsedLUISBlob.LUISJsonStructure.utterances.filter(item => item.intent == referenceIntent);
// find and add any patterns for this intent
patterns = parsedLUISBlob.LUISJsonStructure.patterns.filter(item => item.intent == referenceIntent);
}
(utterances || []).forEach(item => newUtterancesToAdd.push(new hClasses.uttereances(item.text, utterance.intent)));
(patterns || []).forEach(item => newPatternsToAdd.push(new hClasses.pattern(item.pattern, utterance.intent)));
// remove this reference utterance from the list
spliceList.push(idx);
}
}
});
// remove reference utterances from the list. The spliceList needs to be sorted so splice will actually work.
spliceList.sort((a,b) => a-b).forEach((item, idx) => luisModel.LUISJsonStructure.utterances.splice((item - idx), 1));
// add new utterances to the list
newUtterancesToAdd.forEach(item => luisModel.LUISJsonStructure.utterances.push(item));
// add new patterns to the list
newPatternsToAdd.forEach(item => luisModel.LUISJsonStructure.patterns.push(item));
newPatternsToAdd.forEach(patternObject => {
if(patternObject.pattern.includes('{'))
{
let entityRegex = new RegExp(/\{(.*?)\}/g);
let entitiesFound = patternObject.pattern.match(entityRegex);
entitiesFound.forEach(function (entity) {
entity = entity.replace("{", "").replace("}", "");
let entityName = entity;
let roleName = '';
if (entity.includes(':')) {
// this is an entity with role
[entityName, roleName] = entity.split(':');
}
// insert the entity only if it does not already exist
let simpleEntityInMaster = luisModel.LUISJsonStructure.entities.find(item => item.name == entityName);
let compositeInMaster = luisModel.LUISJsonStructure.composites.find(item => item.name == entityName);
let listEntityInMaster = luisModel.LUISJsonStructure.closedLists.find(item => item.name == entityName);
let regexEntityInMaster = luisModel.LUISJsonStructure.regex_entities.find(item => item.name == entityName);
let prebuiltInMaster = luisModel.LUISJsonStructure.prebuiltEntities.find(item => item.name == entityName);
let paIdx = -1;
let patternAnyInMaster = luisModel.LUISJsonStructure.patternAnyEntities.find((item, idx) => {
if (item.name === entityName) {
paIdx = idx;
return true;
}
return false;
});
if (!simpleEntityInMaster &&
!compositeInMaster &&
!listEntityInMaster &&
!regexEntityInMaster &&
!prebuiltInMaster) {
if (!patternAnyInMaster) {
// add a pattern.any entity
if (roleName !== '') {
parseFileContents.addItemOrRoleIfNotPresent(luisModel.LUISJsonStructure, LUISObjNameEnum.PATTERNANYENTITY, entityName, [roleName])
} else {
parseFileContents.addItemIfNotPresent(luisModel.LUISJsonStructure, LUISObjNameEnum.PATTERNANYENTITY, entity);
}
} else {
// add the role if it does not exist already.
if (roleName !== '') {
!patternAnyInMaster.roles.includes(roleName) ? patternAnyInMaster.roles.push(roleName) : undefined;
}
}
} else {
// we found this pattern.any entity as another type.
if (patternAnyInMaster && paIdx !== -1) {
// remove the patternAny entity from the list because it has been explicitly defined elsewhere.
luisModel.LUISJsonStructure.patternAnyEntities.splice(paIdx, 1);
}
}
});
}
})
});
}
module.exports = parser; | Helper function to resolve lu file references in utterances
@param {Object} allParsedContent
@returns {void} Nothing
@throws {exception} Throws on errors. exception object includes errCode and text. | resolveReferencesInUtterances ( allParsedContent ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/parser.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/parser.js | MIT |
static visitNormalIntentStringContext(ctx) {
let utterance = '';
let entities = [];
let errorMsgs = [];
for (const node of ctx.children) {
const innerNode = node;
switch (innerNode.symbol.type) {
case lp.DASH: break;
case lp.EXPRESSION: {
let result = this.extractEntityFromUtterence(innerNode.getText());
let entityObjects = result.entities;
errorMsgs = errorMsgs.concat(result.errorMsgs);
if (entityObjects[entityObjects.length - 1].entityValue !== undefined) {
// simple entity
utterance = utterance.concat(entityObjects[entityObjects.length - 1].entityValue);
for (const entityObject of entityObjects) {
let startPos = utterance.lastIndexOf(entityObject.entityValue);
let endPos = startPos + entityObject.entityValue.length - 1;
entities.push({
type: LUISObjNameEnum.ENTITIES,
entity: entityObject.entityName,
role: entityObject.role,
startPos: startPos,
endPos: endPos
});
}
} else {
// pattern.any entity
utterance = utterance.concat(innerNode.getText());
entities.push({
type: LUISObjNameEnum.PATTERNANYENTITY,
entity: entityObjects[0].entityName,
role: entityObjects[0].role
})
}
break;
}
default: {
utterance = utterance.concat(innerNode.getText());
break;
}
}
}
return { utterance, entities, errorMsgs };
} | @param {lp.NormalIntentStringContext} ctx
@returns {object} | visitNormalIntentStringContext ( ctx ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/visitor.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/visitor.js | MIT |
static extractEntityFromUtterence(exp) {
let entities = [];
let errorMsgs = [];
exp = exp.substring(1, exp.length - 1).trim();
let equalIndex = exp.indexOf('=');
if (equalIndex !== -1) {
// entity with labelled value
let entityName = exp.substring(0, equalIndex).trim();
let entityValue = exp.substring(equalIndex + 1).trim();
let updatedEntityValue = entityValue;
let compositeEntityRightIndex = updatedEntityValue.indexOf('}');
let compositeEntityLeftIndex = updatedEntityValue.substring(0, compositeEntityRightIndex).lastIndexOf('{');
while (compositeEntityLeftIndex > -1 && compositeEntityRightIndex > compositeEntityLeftIndex) {
// composite entities
let compositeEntityDefinition = updatedEntityValue.substring(compositeEntityLeftIndex + 1, compositeEntityRightIndex).trim();
let compositeEntityEqualIndex = compositeEntityDefinition.indexOf('=');
if (compositeEntityEqualIndex !== -1) {
let compositeEntityName = compositeEntityDefinition.substring(0, compositeEntityEqualIndex).trim();
let compositeEntityValue = compositeEntityDefinition.substring(compositeEntityEqualIndex + 1).trim();
entities.push({ entityName: compositeEntityName, entityValue: compositeEntityValue });
updatedEntityValue = updatedEntityValue.substring(0, compositeEntityLeftIndex) + compositeEntityValue + updatedEntityValue.substring(compositeEntityRightIndex + 1);
compositeEntityRightIndex = updatedEntityValue.indexOf('}');
compositeEntityLeftIndex = updatedEntityValue.substring(0, compositeEntityRightIndex).lastIndexOf('{');
} else {
errorMsgs.push(`Composite entity "${entityName}" includes pattern.any entity "${compositeEntityDefinition}".\r\n\tComposites cannot include pattern.any entity as a child.`)
break;
}
}
entities.push({ entityName: entityName, entityValue: updatedEntityValue });
entities.forEach(entity => {
let colonIndex = entity.entityName.indexOf(':');
if (colonIndex !== -1) {
let entityName = entity.entityName.substring(0, colonIndex).trim();
let roleName = entity.entityName.substring(colonIndex + 1).trim();
entity.entityName = entityName;
entity.role = roleName;
}
});
} else {
// pattern.any entity
let colonIndex = exp.indexOf(':');
if (colonIndex !== -1) {
let entityName = exp.substring(0, colonIndex).trim();
let roleName = exp.substring(colonIndex + 1).trim();
entities.push({ entityName: entityName, role: roleName });
} else {
let entityName = exp.trim();
entities.push({ entityName: entityName });
}
}
return { entities, errorMsgs };
} | @param {string} exp
@returns {object} | extractEntityFromUtterence ( exp ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/visitor.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/visitor.js | MIT |
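Given the logic above, the two common inputs resolve as follows. This is a hypothetical call on the visitor class; the require path and export name are assumptions.
const visitor = require('./lib/visitor');   // export name and path assumed

// Labelled simple entity
console.log(visitor.extractEntityFromUtterence('{userName=vishwac}'));
// { entities: [ { entityName: 'userName', entityValue: 'vishwac' } ], errorMsgs: [] }

// Pattern.any entity with a role
console.log(visitor.extractEntityFromUtterence('{userName:firstName}'));
// { entities: [ { entityName: 'userName', role: 'firstName' } ], errorMsgs: [] }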
constructor(parseTree) {
this.ParseTree = parseTree;
this.Questions = [this.ExtractQuestion(parseTree)];
this.Questions = this.Questions.concat(this.ExtractMoreQuestions(parseTree));
this.FilterPairs = this.ExtractFilterPairs(parseTree);
this.Answer = this.ExtractAnswer(parseTree);
} | @param {QnaDefinitionContext} parseTree | constructor ( parseTree ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/luQna.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/luQna.js | MIT |
sanitizeNewLines(fileContent) {
return fileContent.replace(ANY_NEWLINE, NEWLINE);
}, | @param {string} fileContent string content of file may contain any new line chars.
@returns {string} string content of file using current OS new line char | sanitizeNewLines ( fileContent ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/helpers.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/helpers.js | MIT |
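A self-contained version of the normalization above. The ANY_NEWLINE regex and NEWLINE constant are assumptions standing in for the library's constants (a catch-all newline pattern and the OS line ending).
const os = require('os');
const ANY_NEWLINE = /\r\n|\r|\n/g;   // assumed: matches CRLF, CR and LF
const NEWLINE = os.EOL;

const sanitizeNewLines = fileContent => fileContent.replace(ANY_NEWLINE, NEWLINE);

console.log(JSON.stringify(sanitizeNewLines('# Greeting\r\n- hi\r- hello\n')));
// On Linux/macOS: "# Greeting\n- hi\n- hello\n"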
findLUFiles: function(inputFolder, getSubFolders) {
let results = [];
const luExt = '.lu';
fs.readdirSync(inputFolder).forEach(function(dirContent) {
dirContent = path.resolve(inputFolder,dirContent);
if(getSubFolders && fs.statSync(dirContent).isDirectory()) {
results = results.concat(helpers.findLUFiles(dirContent, getSubFolders));
}
if(fs.statSync(dirContent).isFile()) {
if(dirContent.endsWith(luExt)) {
results.push(dirContent);
}
}
});
return results;
}, | Helper function to recursively get all .lu files
@param {string} inputFolder input folder name
@param {boolean} getSubFolders indicates if we should recursively look in sub-folders as well
@returns {Array} Array of .lu files found | findLUFiles ( inputFolder , getSubFolders ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/helpers.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/helpers.js | MIT |
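Hypothetical usage of the folder walker above (require path and folder name assumed):
const helpers = require('./lib/helpers');   // path assumed

console.log(helpers.findLUFiles('./bot', false)); // .lu files directly under ./bot
console.log(helpers.findLUFiles('./bot', true));  // .lu files under ./bot and all of its sub-folders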
constructor(parseTree) {
this.ParseTree = parseTree;
this.Name = this.ExtractName(parseTree);
this.Type = this.ExtractType(parseTree);
const result = this.ExtractSynonymsOrPhraseList(parseTree);
this.SynonymsOrPhraseList = result.synonymsOrPhraseList;
this.Errors = result.errors;
} | @param {EntityDefinitionContext} parseTree | constructor ( parseTree ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/luEntity.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/luEntity.js | MIT |
constructModelDescFromLUISJSON : async function(LUISJSON) {
let modelDesc = NEWLINE;
modelDesc += '> LUIS application information' + NEWLINE;
modelDesc += '> !# @app.name = ' + LUISJSON.name + NEWLINE;
modelDesc += '> !# @app.desc = ' + LUISJSON.desc + NEWLINE;
modelDesc += '> !# @app.culture = ' + LUISJSON.culture + NEWLINE;
modelDesc += '> !# @app.versionId = ' + LUISJSON.versionId + NEWLINE;
modelDesc += '> !# @app.luis_schema_version = ' + LUISJSON.luis_schema_version + NEWLINE;
return modelDesc;
}, | Helper function to construct model description information from LUIS JSON
@param {Object} LUISJSON
@returns {string} model description | constructModelDescFromLUISJSON ( LUISJSON ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/toLU-helpers.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/toLU-helpers.js | MIT |
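A hedged usage sketch (require path and input values assumed). The helper is async and resolves to the "> !# @app.*" comment block used when converting LUIS JSON back to .lu:
const toLUHelpers = require('./lib/toLU-helpers');   // path assumed

toLUHelpers.constructModelDescFromLUISJSON({
    name: 'PizzaBot',
    desc: 'demo app',
    culture: 'en-us',
    versionId: '0.1',
    luis_schema_version: '3.2.0'
}).then(modelDesc => process.stdout.write(modelDesc));
// > LUIS application information
// > !# @app.name = PizzaBot
// > !# @app.desc = demo app
// > !# @app.culture = en-us
// > !# @app.versionId = 0.1
// > !# @app.luis_schema_version = 3.2.0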
constructModelDescFromQnAJSON : async function(QnAJSON) {
let modelDesc = NEWLINE;
modelDesc += '> QnA KB information' + NEWLINE;
modelDesc += '> !# @kb.name = ' + QnAJSON.name + NEWLINE;
return modelDesc;
}, | Helper function to construct model description information from QnA JSON
@param {Object} QnAJSON
@returns {string} model description | constructModelDescFromQnAJSON ( QnAJSON ) | javascript | microsoft/botbuilder-tools | packages/Ludown/lib/toLU-helpers.js | https://github.com/microsoft/botbuilder-tools/blob/master/packages/Ludown/lib/toLU-helpers.js | MIT |