// Given a doc and the name of one of its schema fields, return that
// field's `options` property. Returns `{}` in every failure case (no
// doc/type, invalid field name, unknown doc type, or a field without
// options) rather than crashing, because this is frequently invoked
// from area/singleton template helpers with incomplete context.
self.getSchemaOptions = function(doc, name) {
  if (!(doc && doc.type)) {
    // Probably an area or singleton helper call without a
    // doc object; we cannot find a schema, but nor should
    // we crash
    return {};
  }
  if ((!name) || ((typeof name) !== 'string')) {
    // Field name missing or not a string; a schema lookup
    // is impossible, so fail softly
    return {};
  }
  var typeManager = self.apos.docs.getManager(doc.type);
  if (!typeManager) {
    // This happens if we try to find the schema options for an area in
    // a widget or something else that isn't a top level doc type, or
    // the projection did not include type.
    //
    // TODO: a better solution to the entire option-forwarding problem? -Tom
    return {};
  }
  var matchingField = _.find(typeManager.schema, 'name', name);
  if (!(matchingField && matchingField.options)) {
    return {};
  }
  return matchingField.options;
};
// Find the file group (e.g. "images", "office") that accepts the given
// file extension, honoring each group's `extensionMaps` aliases
// (e.g. "jpeg" -> "jpg") before checking membership. Returns the group
// object, or undefined if no group accepts the extension.
self.getFileGroup = function(extension) {
  return _.find(self.fileGroups, function(group) {
    // Normalize the extension through this group's alias map first
    var normalized = group.extensionMaps[extension] || extension;
    return _.contains(group.extensions, normalized);
  });
};
// For each distinct archive model identity that at least one model has
// referenced via `archiveModelIdentity`, verify that a model with that
// identity is actually registered and is usable as an archive for
// Waterline's .archive(): it must have all mandatory attributes, use
// `id` as its primary key, and not encrypt any mandatory attribute.
_.each(archiversInfoByArchiveIdentity, function(archiversInfo, archiveIdentity) {
  var archiveWmd = _.find(wmds, function(wmd){ return wmd.prototype.identity === archiveIdentity; });
  if (!archiveWmd) {
    throw new Error('Invalid `archiveModelIdentity` setting. A model declares `archiveModelIdentity: \''+archiveIdentity+'\'`, but there\'s no other model actually registered with that identity to use as an archive!');
  }

  // Validate that this archive model can be used for the purpose of Waterline's .archive()
  // > (note that the error messages here should be considerate of the case where someone is
  // > upgrading their app from an older version of Sails/Waterline and might happen to have
  // > a model named "Archive".)
  var EXPECTED_ATTR_NAMES = ['id', 'createdAt', 'fromModel', 'originalRecord', 'originalRecordId'];
  var actualAttrNames = _.keys(archiveWmd.prototype.attributes);
  var namesOfMissingAttrs = _.difference(EXPECTED_ATTR_NAMES, actualAttrNames);
  try {

    if (namesOfMissingAttrs.length > 0) {
      throw flaverr({
        code: 'E_INVALID_ARCHIVE_MODEL',
        because: 'it is missing '+ namesOfMissingAttrs.length+' mandatory attribute'+(namesOfMissingAttrs.length===1?'':'s')+': '+namesOfMissingAttrs+'.'
      });
    }//•

    if (archiveWmd.prototype.primaryKey !== 'id') {
      throw flaverr({
        code: 'E_INVALID_ARCHIVE_MODEL',
        because: 'it is using an attribute other than `id` as its logical primary key attribute.'
      });
    }//•

    // FIX: the original code did `_.any(EXPECTED_ATTR_NAMES, { encrypt: true })`,
    // which applies the `_.matches` shorthand to the attribute *names* (plain
    // strings), so it could never be truthy and the check was dead. Inspect
    // the attribute definitions themselves instead. (Safe to index directly:
    // the missing-attributes check above already threw if any are absent.)
    if (_.any(EXPECTED_ATTR_NAMES, function(attrName) {
      return archiveWmd.prototype.attributes[attrName].encrypt === true;
    })) {
      throw flaverr({
        code: 'E_INVALID_ARCHIVE_MODEL',
        because: 'it is using at-rest encryption on one of its mandatory attributes, when it shouldn\'t be.'
      });
    }//•

    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    // FUTURE: do more checks (there's a lot of things we should probably check-- e.g. the `type` of each
    // mandatory attribute, that no crazy defaultsTo is provided, that the auto-timestamp is correct, etc.)
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  } catch (err) {
    switch (err.code) {
      // FIX: message previously read "this `X` model this is currently …" and
      // never closed its parenthetical; both corrected below.
      case 'E_INVALID_ARCHIVE_MODEL':
        throw new Error(
          'The `'+archiveIdentity+'` model cannot be used as a custom archive, because '+err.because+'\n'+
          'Please adjust this custom archive model accordingly, or otherwise switch to a different '+
          'model as your custom archive. (For reference, this `'+archiveIdentity+'` model is currently '+
          'configured as the custom archive model for '+archiversInfo.archivers.length+' other '+
          'model'+(archiversInfo.archivers.length===1?'':'s')+': '+_.pluck(archiversInfo.archivers, 'identity')+'.)'
        );
      default:
        throw err;
    }
  }
});//∞
// Handler for the help command: if a subcommand name was supplied,
// show that command's own help screen; otherwise show general help.
cmd.action(function(){
  var wantsCommandHelp = program.args.length > 1 && _.isString(program.args[0]);
  if (wantsCommandHelp) {
    var matchedCmd = _.find(program.commands, {_name: program.args[0]});
    if (matchedCmd) {
      matchedCmd.help();
      return;
    }
  }
  program.help();
});
// Seed customer records, then seed payment records that reference the
// 'foo' and 'bar' customers by id, exposing both result sets for the
// assertions that follow.
Associations.Customerbelongs.createEach(records, function(err, _customers) {
  if (err) { return done(err); }
  customers = _customers;

  var fooId = _.find(_customers, { name: 'foo'}).id;
  var barId = _.find(_customers, { name: 'bar'}).id;

  Associations.Paymentbelongs.createEach([
    { amount: 1, type: 'belongsTo find', customer: fooId },
    { amount: 2, type: 'belongsTo find', customer: barId },
    { amount: 3, type: 'empty payment' },
    { amount: 4, customer: fooId }
  ], function(err, _payments) {
    if (err) { return done(err); }
    // Expose results for examination below
    payments = _payments;
    return done();
  });
});
// Attach crop/focal-point relationship fields to the `_pieces` join,
// if the schema has one, then invoke the inherited alterFields. All
// six values default to null: a null `left` means "no crop", and null
// x/y mean "no focal point".
options.alterFields = function(schema) {
  var piecesJoin = _.find(schema, { name: '_pieces' });
  if (piecesJoin) {
    piecesJoin.relationshipsField = 'relationships';
    piecesJoin.relationship = _.map([
      // Cropping coordinates (integers)
      [ 'integer', 'left', 'Left' ],
      [ 'integer', 'top', 'Top' ],
      [ 'integer', 'width', 'Width' ],
      [ 'integer', 'height', 'Height' ],
      // Focal point percentages (floats)
      [ 'float', 'x', 'X' ],
      [ 'float', 'y', 'Y' ]
    ], function(spec) {
      return { type: spec[0], def: null, name: spec[1], label: spec[2] };
    });
  }
  superAlterFields(schema);
};
// Clone an existing attachment: copy its underlying uploadfs file
// (via a local temp file) under a freshly generated id, insert a new
// metadata document for the copy, and deliver (err, target) to the
// callback. Image attachments go through copyImageIn so their size
// variants are regenerated for the clone.
self.clone = function(req, source, callback) {
  var sourcePath = '/attachments/' + source._id + '-' + source.name + '.' + source.extension;
  var tempFile = self.uploadfs.getTempPath() + '/' + self.apos.utils.generateId() + '.' + source.extension;

  var target = {
    _id: self.apos.utils.generateId(),
    length: source.length,
    group: source.group,
    createdAt: new Date(),
    name: source.name,
    title: source.title,
    extension: source.extension
  };

  var fileGroup = _.find(self.fileGroups, 'name', source.group);
  var isImage = !!(fileGroup && fileGroup.image);
  if (isImage) {
    // TODO add clone capability for crops of an image
    // target.crops = source.crops;
    // target.crop = source.crop;
    target.width = source.width;
    target.height = source.height;
    target.landscape = source.landscape;
    target.portrait = source.portrait;
  }
  var copyIn = isImage ? self.uploadfs.copyImageIn : self.uploadfs.copyIn;

  var targetPath = '/attachments/' + target._id + '-' + target.name + '.' + target.extension;
  return async.series([
    function(callback) {
      // Get the source, place in tempfile
      return self.uploadfs.copyOut(sourcePath, tempFile, callback);
    },
    function(callback) {
      // Copy tempfile to target
      return copyIn(tempFile, targetPath, callback);
    },
    function(callback) {
      // Update meta for target
      return self.db.insert(target, callback);
    }
  ], function(err) {
    // Best-effort temp file cleanup; failure to unlink is ignored
    fs.unlink(tempFile, function() { });
    return callback(err, target);
  });
};
// Reduce any nested association objects present in `values` down to
// their foreign key values so the record can be persisted flat.
// Mutates `values` in place. Relies on `hop` (hasOwnProperty helper)
// and `method` from the enclosing scope.
Object.keys(values).forEach(function(key) {
  // Check to see if this key is a foreign key
  var attribute = self.waterline.collections[model].schema[key];
  // If not a plainObject, check if this is a model instance and has a toObject method
  if (!_.isPlainObject(values[key])) {
    if (_.isObject(values[key]) && !Array.isArray(values[key]) && values[key].toObject && typeof values[key].toObject === 'function') {
      values[key] = values[key].toObject();
    } else {
      return;
    }
  }

  // Check that this user-specified value is not NULL
  // NOTE(review): a raw null can never reach this line — the
  // plain-object guard above already returns for it — so this
  // appears to be defensive/dead; confirm before removing.
  if (values[key] === null) return;

  // Check that this user-specified value actually exists
  // as an attribute in `model`'s schema.
  // If it doesn't- just ignore it
  if (typeof attribute !== 'object') return;
  if (!hop(values[key], attribute.on)) return;

  // Look and see if the related model has a custom primary key AND that
  // the intended method is "create"
  var related = self.waterline.collections[attribute.references];
  var relatedPK = _.find(related.attributes, { primaryKey: true });

  // If a custom PK was used and it's not autoIncrementing and the record
  // is being created then go ahead and don't reduce it. This allows nested
  // creates to work when custom PK's are used.
  if (!relatedPK.autoIncrement && !related.autoPK && method && (method == 'create' || method == 'update')) {
    return;
  }

  // Otherwise reduce the association like normal
  var fk = values[key][attribute.on];
  values[key] = fk;
});
// Final callback of an async.series whose earlier steps (not visible
// here) load the attachment `info`. Ensures the requested crop exists:
// if it is already recorded, succeed immediately; otherwise pull the
// original out of cloud storage, produce the cropped rendition, and
// record the crop in the attachment's metadata.
], function(err) {
  if (err) { return callback(err); }
  if (!info) { return callback('notfound'); }
  if (!self.croppable[info.extension]) {
    return callback(new Error(info.extension + ' files cannot be cropped, do not present cropping UI for this type'));
  }
  info.crops = info.crops || [];
  // `crop` is matched structurally (left/top/width/height) against
  // crops already performed for this attachment
  var existing = _.find(info.crops, crop);
  if (existing) {
    // We're done, this crop is already available
    return callback(null);
  }
  // Pull the original out of cloud storage to a temporary folder where
  // it can be cropped and popped back into uploadfs
  var originalFile = '/attachments/' + info._id + '-' + info.name + '.' + info.extension;
  var tempFile = self.uploadfs.getTempPath() + '/' + self.apos.utils.generateId() + '.' + info.extension;
  // Crop coordinates are embedded in the rendition's filename
  var croppedFile = '/attachments/' + info._id + '-' + info.name + '.' + crop.left + '.' + crop.top + '.' + crop.width + '.' + crop.height + '.' + info.extension;
  return async.series([
    function(callback) {
      // Fetch the original into the temp file
      self.uploadfs.copyOut(originalFile, tempFile, callback);
    },
    function(callback) {
      // Write the cropped rendition back into uploadfs
      self.uploadfs.copyImageIn(tempFile, croppedFile, { crop: crop }, callback);
    },
    function(callback) {
      // Persist the new crop in the attachment's metadata
      info.crops.push(crop);
      self.db.update({ _id: info._id }, info, callback);
    }
  ], function(err) {
    // We're done with the temp file. We don't care if it was never created.
    fs.unlink(tempFile, function() { });
    return callback(err);
  });
});
generic: function(req, permissions, verb, type) { if (verb === 'view') { return true; } if (permissions[verb]) { return true; } if (permissions[verb + '-' + type]) { return true; } if (_.find(self.impliedBy[verb] || [], function(implied) { if (permissions[implied]) { return true; } if (permissions[implied + '-' + type]) { return true; } })) { return true; } return false; },
return self.find(req, { _id: _id }).ancestors(true).permission('edit-apostrophe-page').toObject(function(err, page) { if (err) { self.apos.utils.error(err); return next('error'); } if (!page) { return next('notfound'); } var parentPage = page._ancestors && page._ancestors[0] && page._ancestors[page._ancestors.length - 1]; var schema = self.allowedSchema(req, page, parentPage); // Force slug and title to be in sync again, otherwise copying the homepage is problematic // Also makes sense to reset this 'preference' for the new copy var slugComponents = page.slug.split('/'); var currentSlugTitle = slugComponents.pop(); if (currentSlugTitle !== self.apos.utils.slugify(page.title)) { slugComponents[slugComponents.length - 1] = self.apos.utils.slugify(page.title); page.slug = slugComponents.join('/'); } // If no slug field, re-add it (happens when copying from Homepage) if (!_.find(schema, { name: 'slug' })) { schema.push({ name: 'slug', label: 'Slug', type: 'slug', required: true }); } // We modified it, we have to bless the new version self.apos.schemas.bless(req, schema); return next(null, { data: page, schema: schema }); });
it('should not be received by the sender', function (done){
  // Locate the app instance whose port matches the first socket (the sender)
  var senderApp = _.find(apps, function(app) {
    return parseInt(app.config.port) === parseInt(sockets[0].port);
  });
  // The sender must not have observed its own broadcast
  assert(!senderApp._receivedMessageEvents['broadcast']);
  return done();
});
// Adapter implementation of "find": fetch the records matching a
// stage-3 query from the Mongo collection backing this model, honoring
// where/limit/skip/sort/select, then normalize the raw documents
// (in place) before handing them back to Waterline via exits.
fn: function (inputs, exits) {

  // Dependencies
  var assert = require('assert');
  var _ = require('@sailshq/lodash');
  var processNativeRecord = require('./private/process-native-record');
  var buildMongoWhereClause = require('./private/build-mongo-where-clause');

  // Local var for the stage 3 query, for easier access.
  var s3q = inputs.query;

  // Optional verbose logging of the incoming stage-3 query
  if (s3q.meta && s3q.meta.logMongoS3Qs) {
    console.log('* * * * * *\nADAPTER (FIND RECORDS):',require('util').inspect(s3q,{depth:10}),'\n');
  }

  // Local var for the `tableName`, for clarity.
  var tableName = s3q.using;

  // Grab the model definition
  var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName});
  if (!WLModel) {
    return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)'));
  }//-•

  // Get a handle on the Mongo collection for this model's table.
  var db = inputs.connection;
  var mongoCollection = db.collection(tableName);

  // Build a Mongo-style WHERE from the `where` clause.
  var mongoWhere;
  try {
    mongoWhere = buildMongoWhereClause(s3q.criteria.where, WLModel, s3q.meta);
  } catch (e) { return exits.error(e); }

  // Transform the `sort` clause from a stage 3 query into a Mongo sort
  // (an array of [key, 1|-1] directives).
  var mongoSort = _.map(s3q.criteria.sort, function mapSort(s3qSortDirective) {
    var mongoSortDirective = [];
    var sortByKey = _.first(_.keys(s3qSortDirective));
    mongoSortDirective.push(sortByKey);
    var sortDirection = s3qSortDirective[sortByKey];
    assert(sortDirection === 'ASC' || sortDirection === 'DESC', 'At this point, the sort direction should always be ASC or DESC (capitalized). If you are seeing this message, there is probably a bug somewhere in your version of Waterline core.');
    mongoSortDirective.push(sortDirection === 'ASC' ? 1 : -1);
    return mongoSortDirective;
  });

  // Create the initial Mongo deferred, taking care of `where`, `limit`, and `sort`.
  var mongoDeferred;
  try {
    assert(_.isNumber(s3q.criteria.limit), 'At this point, the limit should always be a number, but instead it is `'+s3q.criteria.limit+'`. If you are seeing this message, there is probably a bug somewhere in your version of Waterline core.');
    mongoDeferred = mongoCollection.find(mongoWhere).limit(s3q.criteria.limit);
    if (mongoSort.length) {
      mongoDeferred = mongoDeferred.sort(mongoSort);
    }
  } catch (err) { return exits.error(err); }

  // Add in `select` if necessary.
  // (note that `select` _could_ be undefined--i.e. when a model is `schema: false`)
  if (s3q.criteria.select) {
    // Transform the stage-3 query select array into a Mongo projection dictionary.
    var projection = _.reduce(s3q.criteria.select, function reduceProjection(memo, colName) {
      memo[colName] = 1;
      return memo;
    }, {});
    mongoDeferred = mongoDeferred.project(projection);
  }

  // Add in skip if necessary.
  // (if it is zero, no reason to mess with mixing it in at all)
  if (s3q.criteria.skip) {
    mongoDeferred.skip(s3q.criteria.skip);
  }

  // Find the documents in the db.
  mongoDeferred.toArray(function findCb(err, nativeResult) {
    if (err) { return exits.error(err); }

    // Process records (mutate in-place) to wash away adapter-specific eccentricities.
    var phRecords = nativeResult;
    try {
      _.each(phRecords, function (phRecord){
        processNativeRecord(phRecord, WLModel, s3q.meta);
      });
    } catch (e) { return exits.error(e); }

    return exits.success(phRecords);
  }); // </ mongoDeferred.toArray() >
}
/**
 * Parse the default blueprint query options from an incoming request.
 *
 * Determines the target model from `req.options.action` (e.g. 'user/find'),
 * then builds a `queryOptions` dictionary (criteria, populates, new-record
 * values, etc.) appropriate for the blueprint action being run
 * (`req.options.blueprintAction`).
 *
 * @param {Request} req
 * @returns {Dictionary} query options ready for use with Waterline
 * @throws {Error} if the model cannot be determined, or a required route
 *         option (e.g. `req.options.alias`) is missing
 * @throws {Error} (flavored `name: 'UsageError'`) if user-provided JSON
 *         (`where` clause, associated ids) cannot be parsed
 */
module.exports = function parseBlueprintOptions(req) {

  // If you're copying code from one of the sections in the switch statement below,
  // you'll probably also want to copy this setup code.

  // Set some defaults.
  var DEFAULT_LIMIT = 30;
  var DEFAULT_POPULATE_LIMIT = 30;

  // Get the name of the blueprint action being run.
  var blueprint = req.options.blueprintAction;

  // Get the model identity from the action name (e.g. 'user/find').
  var model = req.options.action.split('/')[0];
  if (!model) { throw new Error(util.format('No "model" specified in route options.')); }

  // Get the model class.
  var Model = req._sails.models[model];
  if ( !Model ) { throw new Error(util.format('Invalid route option, "model".\nI don\'t know about any models named: `%s`',model)); }

  // Build the default populates dictionary: to-many ("collection")
  // associations get a full sub-criteria skeleton, to-one get `{}`.
  var defaultPopulates = _.reduce(Model.associations, function(memo, association) {
    if (association.type === 'collection') {
      memo[association.alias] = {
        where: {},
        limit: DEFAULT_POPULATE_LIMIT,
        skip: 0,
        select: [ '*' ],
        omit: []
      };
    } else {
      memo[association.alias] = {};
    }
    return memo;
  }, {});

  // Initialize the queryOptions dictionary we'll be returning.
  var queryOptions = {
    using: model,
    populates: defaultPopulates
  };

  switch (blueprint) {

    //  ─────  find / findOne  ─────
    case 'find':
    case 'findOne':
      queryOptions.criteria = {};

      // Parse criteria: `where`
      queryOptions.criteria.where = (function getWhereCriteria(){
        var where = {};

        // For `findOne`, set "where" to just look at the primary key.
        if (blueprint === 'findOne') {
          where[Model.primaryKey] = req.param('id');
          return where;
        }

        // Look for explicitly specified `where` parameter.
        where = req.allParams().where;

        // If `where` parameter is a string, try to interpret it as JSON.
        // (If it cannot be parsed, throw a UsageError.)
        if (_.isString(where)) {
          try {
            where = JSON.parse(where);
          } catch (e) {
            throw flaverr({ name: 'UsageError' }, new Error('Could not JSON.parse() the provided `where` clause. Here is the raw error: '+e.stack));
          }
        }//>-•

        // If `where` has not been specified, but other unbound parameter variables
        // **ARE** specified, build the `where` option using them.
        if (!where) {
          // Prune params which aren't fit to be used as `where` criteria
          // to build a proper where query
          where = req.allParams();

          // Omit built-in runtime config (like query modifiers)
          where = _.omit(where, ['limit', 'skip', 'sort', 'populate', 'select', 'omit']);

          // Omit any params that have `undefined` on the RHS.
          where = _.omit(where, function(p) { if (_.isUndefined(p)) { return true; } });
        }//>-

        // Return final `where`.
        return where;
      })();

      // Parse criteria: `select` (or, failing that, `omit`)
      if (!_.isUndefined(req.param('select'))) {
        queryOptions.criteria.select = req.param('select').split(',').map(function(attribute) {return attribute.trim();});
      }
      else if (!_.isUndefined(req.param('omit'))) {
        queryOptions.criteria.omit = req.param('omit').split(',').map(function(attribute) {return attribute.trim();});
      }

      // Parse criteria: `limit` (default applied if absent)
      if (!_.isUndefined(req.param('limit'))) {
        queryOptions.criteria.limit = req.param('limit');
      } else {
        queryOptions.criteria.limit = DEFAULT_LIMIT;
      }

      // Parse criteria: `skip`
      if (!_.isUndefined(req.param('skip'))) {
        queryOptions.criteria.skip = req.param('skip');
      }

      // Parse criteria: `sort`
      if (!_.isUndefined(req.param('sort'))) {
        queryOptions.criteria.sort = (function getSortCriteria() {
          var sort = req.param('sort');
          if (_.isUndefined(sort)) {return undefined;}

          // If `sort` is a string, attempt to JSON.parse() it.
          // (e.g. `{"name": 1}`)
          if (_.isString(sort)) {
            try {
              sort = JSON.parse(sort);
              // If it is not valid JSON (e.g. because it's just some other string),
              // then just fall back to interpreting it as-is (e.g. "name ASC")
            } catch(unusedErr) {}
          }
          return sort;
        })();
      }

      // If a `populate` param was sent, filter the attributes to populate
      // against that value.
      // e.g.:
      // /model?populate=alias1,alias2,alias3
      // /model?populate=[alias1,alias2,alias3]
      if (req.param('populate')) {
        queryOptions.populates = (function getPopulates() {
          // Get the request param.
          var attributes = req.param('populate');

          // If it's `false`, populate nothing.
          if (attributes === 'false') { return {}; }

          // Split the list on commas.
          attributes = attributes.split(',');

          // Trim whitespace off of the attributes.
          attributes = _.reduce(attributes, function(memo, attribute) {
            memo[attribute.trim()] = {};
            return memo;
          }, {});

          return attributes;
        })();
      }

      break;

    //  ─────  create  ─────
    case 'create':

      // Set `fetch: true` so the created record is returned
      queryOptions.meta = { fetch: true };

      // Parse initial values for the new record
      queryOptions.newRecord = (function getNewRecord(){

        // Use all of the request params as values for the new record.
        var values = req.allParams();

        // Attempt to JSON parse any collection attributes into arrays. This is to allow
        // setting collections using the shortcut routes.
        _.each(Model.attributes, function(attrDef, attrName) {
          if (attrDef.collection && (!req.body || !req.body[attrName]) && (req.query && _.isString(req.query[attrName]))) {
            try {
              values[attrName] = JSON.parse(req.query[attrName]);
              // If it is not valid JSON (e.g. because it's just a normal string),
              // then fall back to interpreting it as-is
            } catch(unusedErr) {}
          }
        });

        return values;
      })();

      break;

    //  ─────  update  ─────
    case 'update':

      queryOptions.criteria = { where: {} };
      queryOptions.criteria.where[Model.primaryKey] = req.param('id');

      // Note that we do NOT set `fetch: true`, because if we do so, some versions
      // of Waterline complain that `fetch` need not be included with .updateOne().
      // (Now that we take advantage of .updateOne() in blueprints, this is a thing.)
      queryOptions.meta = {};

      // Parse values to set on the matched record
      queryOptions.valuesToSet = (function getValuesToSet(){

        // Use all of the request params as values for the new record, _except_ `id`.
        var values = _.omit(req.allParams(), 'id');

        // No matter what, don't allow changing the PK via the update blueprint
        // (you should just drop and re-add the record if that's what you really want)
        if (typeof values[Model.primaryKey] !== 'undefined' && values[Model.primaryKey] !== queryOptions.criteria.where[Model.primaryKey]) {
          req._sails.log.warn('Cannot change primary key via update blueprint; ignoring value sent for `' + Model.primaryKey + '`');
        }

        // Make sure the primary key is unchanged
        values[Model.primaryKey] = queryOptions.criteria.where[Model.primaryKey];

        return values;
      })();

      break;

    //  ─────  destroy  ─────
    case 'destroy':

      queryOptions.criteria = {};
      queryOptions.criteria = { where: {} };
      queryOptions.criteria.where[Model.primaryKey] = req.param('id');

      // Set `fetch: true` so the destroyed record is returned
      queryOptions.meta = { fetch: true };

      break;

    //  ─────  add (to collection)  ─────
    case 'add':

      if (!req.options.alias) {
        throw new Error('Missing required route option, `req.options.alias`.');
      }
      queryOptions.alias = req.options.alias;
      queryOptions.targetRecordId = req.param('parentid');
      queryOptions.associatedIds = [req.param('childid')];

      break;

    //  ─────  remove (from collection)  ─────
    case 'remove':

      if (!req.options.alias) {
        throw new Error('Missing required route option, `req.options.alias`.');
      }
      queryOptions.alias = req.options.alias;
      queryOptions.targetRecordId = req.param('parentid');
      queryOptions.associatedIds = [req.param('childid')];

      break;

    //  ─────  replace (collection)  ─────
    case 'replace':

      if (!req.options.alias) {
        throw new Error('Missing required route option, `req.options.alias`.');
      }
      queryOptions.alias = req.options.alias;
      queryOptions.criteria = {};
      queryOptions.criteria = { where: {} };
      queryOptions.targetRecordId = req.param('parentid');

      // Replacement ids may arrive as a JSON body array, or as a (possibly
      // JSON-encoded) query parameter named after the alias.
      queryOptions.associatedIds = _.isArray(req.body) ? req.body : req.query[req.options.alias];
      if (_.isString(queryOptions.associatedIds)) {
        try {
          queryOptions.associatedIds = JSON.parse(queryOptions.associatedIds);
        } catch (e) {
          throw flaverr({ name: 'UsageError', raw: e }, new Error(
            'The associated ids provided in this request (for the `' + req.options.alias + '` collection) are not valid. '+
            'If specified as a string, the associated ids provided to the "replace" blueprint action must be parseable as '+
            'a JSON array, e.g. `[1, 2]`.'
            // FUTURE: Use smart example depending on the expected pk type (e.g. if string, show mongo ids instead)
          ));
        }//</catch>
      }

      break;

    //  ─────  populate  ─────
    case 'populate':

      if (!req.options.alias) {
        throw new Error('Missing required route option, `req.options.alias`.');
      }

      var association = _.find(Model.associations, {alias: req.options.alias});
      if (!association) {
        throw new Error('Consistency violation: `populate` blueprint could not find association `' + req.options.alias + '` in model `' + Model.globalId + '`.');
      }

      queryOptions.alias = req.options.alias;
      queryOptions.criteria = {};

      // Parse criteria: target the parent record by primary key
      queryOptions.criteria = {};
      queryOptions.criteria = { where: {} };
      queryOptions.criteria.where[Model.primaryKey] = req.param('parentid');

      queryOptions.populates = {};
      queryOptions.populates[req.options.alias] = {};

      // If this is a to-many association, add a `where` clause.
      if (association.collection) {
        queryOptions.populates[req.options.alias].where = (function getPopulateCriteria(){
          var where = req.allParams().where;

          // If `where` parameter is a string, try to interpret it as JSON.
          // (If it cannot be parsed, throw a UsageError.)
          if (_.isString(where)) {
            try {
              where = JSON.parse(where);
            } catch (e) {
              throw flaverr({ name: 'UsageError' }, new Error('Could not JSON.parse() the provided `where` clause. Here is the raw error: '+e.stack));
            }
          }//>-•

          // If `where` has not been specified, but other unbound parameter variables
          // **ARE** specified, build the `where` option using them.
          if (!where) {
            // Prune params which aren't fit to be used as `where` criteria
            // to build a proper where query
            where = req.allParams();

            // Omit built-in runtime config (like top-level criteria clauses)
            where = _.omit(where, ['limit', 'skip', 'sort', 'populate', 'select', 'omit', 'parentid']);
            // - - - - - - - - - - - - - - - - - - - - -
            // ^^TODO: what about `where` itself?
            // - - - - - - - - - - - - - - - - - - - - -

            // Omit any params that have `undefined` on the RHS.
            where = _.omit(where, function(p) { if (_.isUndefined(p)) { return true; } });
          }//>-

          // Return final `where`.
          return where;
        })();
      }

      // Parse populate sub-criteria: `select` (or, failing that, `omit`)
      if (!_.isUndefined(req.param('select'))) {
        queryOptions.populates[req.options.alias].select = req.param('select').split(',').map(function(attribute) {return attribute.trim();});
      }
      else if (!_.isUndefined(req.param('omit'))) {
        queryOptions.populates[req.options.alias].omit = req.param('omit').split(',').map(function(attribute) {return attribute.trim();});
      }

      // Parse populate sub-criteria: `limit`
      if (!_.isUndefined(req.param('limit'))) {
        queryOptions.populates[req.options.alias].limit = req.param('limit');
      }
      // If this is a to-many association, use the default limit if not was provided.
      else if (association.collection) {
        queryOptions.populates[req.options.alias].limit = DEFAULT_LIMIT;
      }

      // Parse populate sub-criteria: `skip`
      if (!_.isUndefined(req.param('skip'))) {
        queryOptions.populates[req.options.alias].skip = req.param('skip');
      }

      // Parse populate sub-criteria: `sort`
      if (!_.isUndefined(req.param('sort'))) {
        queryOptions.populates[req.options.alias].sort = (function getSortCriteria() {
          var sort = req.param('sort');
          if (_.isUndefined(sort)) {return undefined;}

          // If `sort` is a string, attempt to JSON.parse() it.
          // (e.g. `{"name": 1}`)
          if (_.isString(sort)) {
            try {
              sort = JSON.parse(sort);
              // If it is not valid JSON (e.g. because it's just a normal string),
              // then fall back to interpreting it as-is (e.g. "fullName ASC")
            } catch(unusedErr) {}
          }
          return sort;
        })();//ˆ
      }

      break;

  }

  return queryOptions;
};
/**
 * Initialize the ORM: validate settings, normalize archive/encryption config,
 * build the schema map and live models, and register datastores with adapters.
 *
 * @param {Dictionary} options
 *        @property {Dictionary} adapters    - adapter definitions, by adapter identity
 *        @property {Dictionary} datastores  - datastore configs, by datastore name
 *        @property {Dictionary?} defaults   - default model settings
 * @param {Function} done
 *        @param {Error?} err
 *        @param {Dictionary?} ontology  - `{ collections, datastores }`
 */
orm.initialize = function initialize(options, done) {
  try {

    // First, verify traditional settings, check compat.:
    // =============================================================================================

    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    // FUTURE: In WL 0.14, deprecate support for this method in favor of the
    // simplified `Waterline.start()` (see bottom of this file).  In WL 1.0,
    // remove it altogether.
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    // Ensure the ORM hasn't already been initialized.
    // (This prevents all sorts of issues, because model definitions are modified in-place.)
    if (_.keys(modelMap).length > 0) {
      throw new Error('A Waterline ORM instance cannot be initialized more than once. To reset the ORM, create a new instance of it by running `new Waterline()`.');
    }

    // Usage assertions.
    // > Bug fix: run these BEFORE the `connections` compatibility shim below, so
    // > that calling .initialize() with no options yields a usage error instead of
    // > a TypeError from dereferencing `options.connections`.
    if (_.isUndefined(options) || !_.keys(options).length) {
      throw new Error('Usage Error: .initialize(options, callback)');
    }

    // Backwards-compatibility for `connections`:
    if (!_.isUndefined(options.connections)) {

      // Sanity check
      assert(_.isUndefined(options.datastores), 'Attempted to provide backwards-compatibility for `connections`, but `datastores` was ALSO defined! This should never happen.');

      options.datastores = options.connections;
      console.warn('\n'+
        'Warning: `connections` is no longer supported. Please use `datastores` instead.\n'+
        'I get what you mean, so I temporarily renamed it for you this time, but here is a stack trace\n'+
        'so you know where this is coming from in the code, and can change it to prevent future warnings:\n'+
        '```\n'+
        (new Error()).stack+'\n'+
        '```\n'
      );
      delete options.connections;
    }//>-

    if (_.isUndefined(options.adapters) || !_.isPlainObject(options.adapters)) {
      throw new Error('Options must contain an `adapters` dictionary');
    }
    if (_.isUndefined(options.datastores) || !_.isPlainObject(options.datastores)) {
      throw new Error('Options must contain a `datastores` dictionary');
    }

    // - - - - - - - - - - - - - - - - - - - - -
    // FUTURE: anchor ruleset checks
    // - - - - - - - - - - - - - - - - - - - - -


    // Next, validate ORM settings related to at-rest encryption, if it is in use.
    // =============================================================================================

    // Determine whether ANY model declares an `encrypt` attribute property.
    var areAnyModelsUsingAtRestEncryption;
    _.each(wmds, function(wmd){
      _.each(wmd.prototype.attributes, function(attrDef){
        if (attrDef.encrypt !== undefined) {
          areAnyModelsUsingAtRestEncryption = true;
        }
      });//∞
    });//∞

    // Only allow using at-rest encryption for compatible Node versions.
    var EA;
    if (areAnyModelsUsingAtRestEncryption) {
      var RX_NODE_MAJOR_DOT_MINOR = /^v([^.]+\.?[^.]+)\./;
      var parsedNodeMajorAndMinorVersion = process.version.match(RX_NODE_MAJOR_DOT_MINOR) && (+(process.version.match(RX_NODE_MAJOR_DOT_MINOR)[1]));
      var MIN_NODE_VERSION = 6;
      var isNativeCryptoFullyCapable = parsedNodeMajorAndMinorVersion >= MIN_NODE_VERSION;
      if (!isNativeCryptoFullyCapable) {
        throw new Error('Current installed node version\'s native `crypto` module is not fully capable of the necessary functionality for encrypting/decrypting data at rest with Waterline. To use this feature, please upgrade to Node v' + MIN_NODE_VERSION + ' or above, flush your node_modules, run npm install, and then try again. Otherwise, if you cannot upgrade Node.js, please remove the `encrypt` property from your models\' attributes.');
      }
      // Lazily require the underlying at-rest encryption implementation only when needed.
      EA = require('encrypted-attr');
    }//fi

    _.each(wmds, function(wmd){

      var modelDef = wmd.prototype;

      // Verify that the `encrypt` attr prop is valid, if in use.
      var isThisModelUsingAtRestEncryption;
      try {
        _.each(modelDef.attributes, function(attrDef, attrName){
          if (attrDef.encrypt !== undefined) {
            if (!_.isBoolean(attrDef.encrypt)) {
              throw flaverr({
                code: 'E_INVALID_ENCRYPT',
                attrName: attrName,
                message: 'If set, `encrypt` must be either `true` or `false`.'
              });
            }//•

            if (attrDef.encrypt === true) {

              isThisModelUsingAtRestEncryption = true;

              if (attrDef.type === 'ref') {
                throw flaverr({
                  code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION',
                  attrName: attrName,
                  whyNotCompatible: 'with `type: \'ref\'` attributes.'
                });
              }//•

              if (attrDef.autoCreatedAt || attrDef.autoUpdatedAt) {
                throw flaverr({
                  code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION',
                  attrName: attrName,
                  whyNotCompatible: 'with `'+(attrDef.autoCreatedAt?'autoCreatedAt':'autoUpdatedAt')+'` attributes.'
                });
              }//•

              if (attrDef.model || attrDef.collection) {
                throw flaverr({
                  code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION',
                  attrName: attrName,
                  whyNotCompatible: 'with associations.'
                });
              }//•

              if (attrDef.defaultsTo !== undefined) {
                // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                // FUTURE: Consider adding support for this.  Will require some refactoring in
                // order to do it right (i.e. otherwise we'll just be copying and pasting the
                // encryption logic).  We'll want to pull it out from normalize-value-to-set
                // into a new utility, then call that from the appropriate spot in
                // normalize-new-record in order to encrypt the initial default value.
                // (See also the other note in normalize-new-record re defaultsTo + cloneDeep.)
                // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                throw flaverr({
                  code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION',
                  attrName: attrName,
                  whyNotCompatible: 'with an attribute that also specifies a `defaultsTo`. '+
                  'Please remove the `defaultsTo` from this attribute definition.'
                });
              }//•
            }//fi
          }//fi
        });//∞
      } catch (err) {
        switch (err.code) {
          case 'E_INVALID_ENCRYPT':
            throw flaverr({
              message:
              'Invalid usage of `encrypt` in the definition for `'+modelDef.identity+'` model\'s '+
              '`'+err.attrName+'` attribute. '+err.message
            }, err);
          case 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION':
            throw flaverr({
              message:
              'Invalid usage of `encrypt` in the definition for `'+modelDef.identity+'` model\'s '+
              '`'+err.attrName+'` attribute. At-rest encryption (`encrypt: true`) cannot be used '+
              err.whyNotCompatible
            }, err);
          default:
            throw err;
        }
      }

      // Verify `dataEncryptionKeys`.
      // (Remember, if there is a secondary key system in use, these DEKs should have
      // already been "unwrapped" before they were passed in to Waterline as model settings.)
      if (modelDef.dataEncryptionKeys !== undefined) {

        if (!_.isObject(modelDef.dataEncryptionKeys) || _.isArray(modelDef.dataEncryptionKeys) || _.isFunction(modelDef.dataEncryptionKeys)) {
          throw flaverr({
            message: 'In the definition for the `'+modelDef.identity+'` model, the `dataEncryptionKeys` model setting '+
            'is invalid. If specified, `dataEncryptionKeys` must be a dictionary (plain JavaScript object).'
          });
        }//•

        // Check all DEKs for validity.
        // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        // (FUTURE: maybe extend EA to support a `validateKeys()` method instead
        // of this -- or at least to have error codes)
        // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        try {
          _.each(modelDef.dataEncryptionKeys, function(dek, dekId){
            if (!dek || !_.isString(dek)) {
              throw flaverr({
                code: 'E_INVALID_DATA_ENCRYPTION_KEYS',
                dekId: dekId,
                message: 'Must be a cryptographically random, 32 byte string.'
              });
            }//•
            if (!dekId.match(/^[a-z\$]([a-z0-9])*$/i)) {
              throw flaverr({
                code: 'E_INVALID_DATA_ENCRYPTION_KEYS',
                dekId: dekId,
                message: 'Please make sure the ids of all of your data encryption keys begin with a letter and do not contain any special characters.'
              });
            }//•
            // If at-rest encryption is actually in use anywhere, prove each DEK
            // works by round-tripping a throwaway value through the encryptor.
            if (areAnyModelsUsingAtRestEncryption) {
              try {
                EA(undefined, { keys: modelDef.dataEncryptionKeys, keyId: dekId }).encryptAttribute(undefined, 'test-value-purely-for-validation');
              } catch (err) {
                throw flaverr({
                  code: 'E_INVALID_DATA_ENCRYPTION_KEYS',
                  dekId: dekId
                }, err);
              }
            }
          });//∞
        } catch (err) {
          switch (err.code) {
            case 'E_INVALID_DATA_ENCRYPTION_KEYS':
              throw flaverr({
                message: 'In the definition for the `'+modelDef.identity+'` model, one of the data encryption keys (`dataEncryptionKeys.'+err.dekId+'`) is invalid.\n'+
                'Details:\n'+
                ' '+err.message
              }, err);
            default:
              throw err;
          }
        }
      }//fi

      // If any attrs have `encrypt: true`, verify that there is both a valid
      // `dataEncryptionKeys` dictionary and a valid `dataEncryptionKeys.default` DEK set.
      if (isThisModelUsingAtRestEncryption) {
        if (!modelDef.dataEncryptionKeys || !modelDef.dataEncryptionKeys.default) {
          throw flaverr({
            message: 'DEKs should be 32 bytes long, and cryptographically random. A random, default DEK is included '+
            'in new Sails apps, so one easy way to generate a new DEK is to generate a new Sails app. '+
            'Alternatively, you could run:\n'+
            ' require(\'crypto\').randomBytes(32).toString(\'base64\')\n'+
            '\n'+
            'Remember: once in production, you should manage your DEKs like you would any other sensitive credential. '+
            'For example, one common best practice is to configure them using environment variables.\n'+
            'In a Sails app:\n'+
            ' sails_models__dataEncryptionKeys__default=vpB2EhXaTi+wYKUE0ojI5cVQX/VRGP++Fa0bBW/NFSs=\n'+
            '\n'+
            ' [?] If you\'re unsure or want advice, head over to https://sailsjs.com/support'
          });
        }//•
      }//fi

    });//∞


    // Next, set up support for the default archive, and validate related settings:
    // =============================================================================================
    var DEFAULT_ARCHIVE_MODEL_IDENTITY = 'archive';

    // Notes for use in docs:
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    // • To choose which datastore the Archive model will live in, set
    //   `archiveModelIdentity: 'myarchive'` in top-level orm settings, and
    //   `datastore: 'foo'` in the 'MyArchive' model.
    // • To choose the `tableName` and `columnName`s for your Archive model, set
    //   `archiveModelIdentity: 'archive'` in top-level orm settings, and in the
    //   'archive' model:
    //     tableName: 'foo',
    //     attributes: {
    //       originalRecord: { type: 'json', columnName: 'barbaz' },
    //       fromModel: { type: 'string', columnName: 'bingbong' }
    //     }
    // • To disable support for the `.archive()` model method, set
    //   `archiveModelIdentity: false` in top-level orm settings.
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    var archiversInfoByArchiveIdentity = {};

    _.each(wmds, function(wmd){

      var modelDef = wmd.prototype;

      // Check the `archiveModelIdentity` model setting, defaulting it if unspecified.
      if (modelDef.archiveModelIdentity === undefined) {
        // > Bug fix: compare the DEFAULT archive identity (not the still-undefined
        // > setting) against this model's identity, so that a model actually named
        // > "archive" is not configured as its own archive.
        if (DEFAULT_ARCHIVE_MODEL_IDENTITY !== modelDef.identity) {
          modelDef.archiveModelIdentity = DEFAULT_ARCHIVE_MODEL_IDENTITY;
        }
        else {
          // A model can't be its own archive model!
          modelDef.archiveModelIdentity = false;
        }
      }//fi

      if (modelDef.archiveModelIdentity === false) {
        // This will cause the .archive() method for this model to error out and explain
        // that the feature was explicitly disabled.
      }
      else if (modelDef.archiveModelIdentity === modelDef.identity) {
        // > Bug fix: `throw` (caught by the surrounding try/catch, which invokes `done`)
        // > instead of `return done(...)` -- returning from an _.each iteratee does NOT
        // > stop iteration, and could have caused `done` to be invoked more than once.
        throw new Error('Invalid `archiveModelIdentity` setting. A model cannot be its own archive! But model `'+modelDef.identity+'` has `archiveModelIdentity: \''+modelDef.archiveModelIdentity+'\'`.');
      }
      else if (!modelDef.archiveModelIdentity || !_.isString(modelDef.archiveModelIdentity)) {
        // > Bug fix: report the offending `modelDef.archiveModelIdentity` value
        // > (the original inspected `options.defaults.archiveModelIdentity`).
        throw new Error('Invalid `archiveModelIdentity` setting. If set, expecting either `false` (to disable .archive() altogether) or the identity of a registered model (e.g. "archive"), but instead got: '+util.inspect(modelDef.archiveModelIdentity,{depth:null}));
      }//fi

      // Keep track of the model identities of all archive models, as well as info
      // about the models using them.
      if (modelDef.archiveModelIdentity !== false) {
        if (!_.contains(Object.keys(archiversInfoByArchiveIdentity), modelDef.archiveModelIdentity)) {
          // Save an initial info dictionary:
          archiversInfoByArchiveIdentity[modelDef.archiveModelIdentity] = {
            archivers: []
          };
        }//fi
        archiversInfoByArchiveIdentity[modelDef.archiveModelIdentity].archivers.push(modelDef);
      }//fi

    });//∞

    // If any models are using the default archive, then register the default archive model
    // if it isn't already registered.
    if (_.contains(Object.keys(archiversInfoByArchiveIdentity), DEFAULT_ARCHIVE_MODEL_IDENTITY)) {
      // Inject the built-in Archive model into the ORM's ontology:
      // • id               (pk-- string or number, depending on where the Archive model is being stored)
      // • createdAt        (timestamp-- this is effectively ≈ "archivedAt")
      // • originalRecord   (json-- the original record, completely unpopulated)
      // • originalRecordId (pk-- string or number, the pk of the original record)
      // • fromModel        (string-- the original model identity)
      //
      // > Note there's no updatedAt!
      var existingDefaultArchiveWmd = _.find(wmds, function(wmd){ return wmd.prototype.identity === DEFAULT_ARCHIVE_MODEL_IDENTITY; });
      if (!existingDefaultArchiveWmd) {
        var defaultArchiversInfo = archiversInfoByArchiveIdentity[DEFAULT_ARCHIVE_MODEL_IDENTITY];

        // Arbitrarily pick the first archiver.
        // (we'll use this to derive a datastore and pk style so that they both match)
        var arbitraryArchiver = defaultArchiversInfo.archivers[0];

        var newWmd = Waterline.Model.extend({
          identity: DEFAULT_ARCHIVE_MODEL_IDENTITY,
          // > Note that we inject a "globalId" for potential use in higher-level frameworks
          // > (e.g. Sails) that might want to globalize this model.  This way, it'd show up
          // > as "Archive" instead of "archive".  Remember: Waterline is NOT responsible for
          // > any globalization itself, this is just advisory.
          globalId: _.capitalize(DEFAULT_ARCHIVE_MODEL_IDENTITY),
          primaryKey: 'id',
          datastore: arbitraryArchiver.datastore,
          attributes: {
            id: arbitraryArchiver.attributes[arbitraryArchiver.primaryKey],
            createdAt: { type: 'number', autoCreatedAt: true, autoMigrations: { columnType: '_numbertimestamp' } },
            fromModel: { type: 'string', required: true, autoMigrations: { columnType: '_string' } },
            originalRecord: { type: 'json', required: true, autoMigrations: { columnType: '_json' } },

            // Use `type:'json'` for this:
            // (since it might contain pks for records from different datastores)
            originalRecordId: { type: 'json', autoMigrations: { columnType: '_json' } },
          }
        });

        wmds.push(newWmd);
      }//fi
    }//fi

    // Now make sure all archive models actually exist, and that they're valid.
    _.each(archiversInfoByArchiveIdentity, function(archiversInfo, archiveIdentity) {
      var archiveWmd = _.find(wmds, function(wmd){ return wmd.prototype.identity === archiveIdentity; });
      if (!archiveWmd) {
        throw new Error('Invalid `archiveModelIdentity` setting. A model declares `archiveModelIdentity: \''+archiveIdentity+'\'`, but there\'s no other model actually registered with that identity to use as an archive!');
      }

      // Validate that this archive model can be used for the purpose of Waterline's .archive()
      // > (note that the error messages here should be considerate of the case where someone is
      // > upgrading their app from an older version of Sails/Waterline and might happen to have
      // > a model named "Archive".)
      var EXPECTED_ATTR_NAMES = ['id', 'createdAt', 'fromModel', 'originalRecord', 'originalRecordId'];
      var actualAttrNames = _.keys(archiveWmd.prototype.attributes);
      var namesOfMissingAttrs = _.difference(EXPECTED_ATTR_NAMES, actualAttrNames);
      try {
        if (namesOfMissingAttrs.length > 0) {
          throw flaverr({
            code: 'E_INVALID_ARCHIVE_MODEL',
            because: 'it is missing '+namesOfMissingAttrs.length+' mandatory attribute'+(namesOfMissingAttrs.length===1?'':'s')+': '+namesOfMissingAttrs+'.'
          });
        }//•

        if (archiveWmd.prototype.primaryKey !== 'id') {
          throw flaverr({
            code: 'E_INVALID_ARCHIVE_MODEL',
            because: 'it is using an attribute other than `id` as its logical primary key attribute.'
          });
        }//•

        // > Bug fix: the original checked `_.any(EXPECTED_ATTR_NAMES, { encrypt: true })`,
        // > which matched the `{encrypt: true}` shorthand against the attribute NAME strings
        // > and thus could never be truthy.  Check the attribute definitions themselves.
        // > (All mandatory attrs are known to exist here, per `namesOfMissingAttrs` above.)
        if (_.any(EXPECTED_ATTR_NAMES, function(expectedAttrName){
          return archiveWmd.prototype.attributes[expectedAttrName].encrypt === true;
        })) {
          throw flaverr({
            code: 'E_INVALID_ARCHIVE_MODEL',
            because: 'it is using at-rest encryption on one of its mandatory attributes, when it shouldn\'t be.'
          });
        }//•

        // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        // FUTURE: do more checks (there's a lot of things we should probably check-- e.g. the
        // `type` of each mandatory attribute, that no crazy defaultsTo is provided, that the
        // auto-timestamp is correct, etc.)
        // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      } catch (err) {
        switch (err.code) {
          case 'E_INVALID_ARCHIVE_MODEL':
            throw new Error(
              'The `'+archiveIdentity+'` model cannot be used as a custom archive, because '+err.because+'\n'+
              'Please adjust this custom archive model accordingly, or otherwise switch to a different '+
              'model as your custom archive. (For reference, this `'+archiveIdentity+'` model this is currently '+
              'configured as the custom archive model for '+archiversInfo.archivers.length+' other '+
              'model'+(archiversInfo.archivers.length===1?'':'s')+': '+_.pluck(archiversInfo.archivers, 'identity')+'.)'
            );
          default:
            throw err;
        }
      }
    });//∞


    // Build up a dictionary of datastores (used by our models?)
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    // TODO: verify the last part of that statement ^^ (not seeing how this is related to "used by our models")
    // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    // =================================================================
    // (The pointless `try { … } catch (err) { throw err; }` wrappers that used to
    // surround these two calls were removed -- any error still propagates to the
    // outer try/catch and thus to `done`.)
    datastoreMap = buildDatastoreMap(options.adapters, options.datastores);

    // Now check out the models and build a schema map (using wl-schema)
    // =================================================================
    var internalSchema = new Schema(wmds, options.defaults);

    // Check the internal "schema map" for any junction models that were
    // implicitly introduced above and handle them.
    _.each(_.keys(internalSchema), function(table) {
      if (internalSchema[table].junctionTable) {
        // Whenever one is found, flag it as `_private: true` and generate
        // a custom constructor for it (based on a clone of the `BaseMetaModel`
        // constructor), then push it on to our set of wmds.
        internalSchema[table]._private = true;
        wmds.push(BaseMetaModel.extend(internalSchema[table]));
      }//fi
    });//∞

    // Now build live models
    // =================================================================
    // Hydrate each model definition (in-place), and also set up a
    // reference to it in the model map.
    _.each(wmds, function (wmd) {

      // Set the attributes and schema values using the normalized versions from
      // Waterline-Schema where everything has already been processed.
      var schemaVersion = internalSchema[wmd.prototype.identity];

      // Set normalized values from the schema version on the model definition.
      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      // FUTURE: no need to use a prototype here, so let's avoid it to minimize future boggling
      // (or if we determine it significantly improves the performance of ORM initialization,
      // then let's keep it, but document that here and leave a link to the benchmark as a comment)
      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      wmd.prototype.identity = schemaVersion.identity;
      wmd.prototype.tableName = schemaVersion.tableName;
      wmd.prototype.datastore = schemaVersion.datastore;
      wmd.prototype.primaryKey = schemaVersion.primaryKey;
      wmd.prototype.meta = schemaVersion.meta;
      wmd.prototype.attributes = schemaVersion.attributes;
      wmd.prototype.schema = schemaVersion.schema;
      wmd.prototype.hasSchema = schemaVersion.hasSchema;

      // Mixin junctionTable or throughTable if available
      if (_.has(schemaVersion, 'junctionTable')) {
        wmd.prototype.junctionTable = schemaVersion.junctionTable;
      }
      if (_.has(schemaVersion, 'throughTable')) {
        wmd.prototype.throughTable = schemaVersion.throughTable;
      }

      var WLModel = buildLiveWLModel(wmd, datastoreMap, context);

      // Store the live Waterline model so it can be used
      // internally to create other records
      modelMap[WLModel.identity] = WLModel;

    });//∞

  } catch (err) { return done(err); }


  // Finally, register datastores.
  // =================================================================
  // Simultaneously register each datastore with the correct adapter.
  // (This is async because the `registerDatastore` method in adapters
  // is async.  But since they're not interdependent, we run them all in parallel.)
  async.each(_.keys(datastoreMap), function(datastoreName, next) {

    var datastore = datastoreMap[datastoreName];

    if (_.isFunction(datastore.adapter.registerConnection)) {
      return next(new Error('The adapter for datastore `' + datastoreName + '` is invalid: the `registerConnection` method must be renamed to `registerDatastore`.'));
    }

    try {
      // Note: at this point, the datastore should always have a usable adapter
      // set as its `adapter` property.

      // Check if the datastore's adapter has a `registerDatastore` method
      if (!_.has(datastore.adapter, 'registerDatastore')) {
        // FUTURE: get rid of this `setImmediate` (or if it's serving a purpose, document what that is)
        setImmediate(function() { next(); });//_∏_
        return;
      }//-•

      // Add the datastore name as the `identity` property in its config.
      datastore.config.identity = datastoreName;

      // Get the identities of all the models which use this datastore, and then build up
      // a simple mapping that can be passed down to the adapter.
      var usedSchemas = {};
      var modelIdentities = _.uniq(datastore.collections);
      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      // TODO: figure out if we still need this `uniq` or not.  If so, document why.
      // If not, remove it.  (hopefully the latter)  e.g.
      // ```
      // assert(modelIdentities.length === datastore.collections.length);
      // ```
      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      _.each(modelIdentities, function(modelIdentity) {
        var WLModel = modelMap[modelIdentity];

        // Track info about this model by table name (for use in the adapter)
        var tableName;
        if (_.has(Object.getPrototypeOf(WLModel), 'tableName')) {
          tableName = Object.getPrototypeOf(WLModel).tableName;
        }
        else {
          tableName = modelIdentity;
        }

        // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        // FUTURE: Suck the `getPrototypeOf()` poison out of this stuff.
        // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        assert(WLModel.tableName === tableName, 'Expecting `WLModel.tableName === tableName`. (Please open an issue: http://sailsjs.com/bugs)');
        assert(WLModel.identity === modelIdentity, 'Expecting `WLModel.identity === modelIdentity`. (Please open an issue: http://sailsjs.com/bugs)');
        assert(WLModel.primaryKey && _.isString(WLModel.primaryKey), 'How flabbergasting! Expecting truthy string in `WLModel.primaryKey`, but got something else. (If you\'re seeing this, there\'s probably a bug in Waterline. Please open an issue: http://sailsjs.com/bugs)');
        assert(WLModel.schema && _.isObject(WLModel.schema), 'Expecting truthy string in `WLModel.schema`, but got something else. (Please open an issue: http://sailsjs.com/bugs)');

        usedSchemas[tableName] = {
          primaryKey: WLModel.primaryKey,
          definition: WLModel.schema,
          tableName: tableName,
          identity: modelIdentity
        };
      });//</ each model identity >

      // Call the `registerDatastore` adapter method.
      datastore.adapter.registerDatastore(datastore.config, usedSchemas, next);

    } catch (err) { return next(err); }

  }, function(err) {
    if (err) { return done(err); }

    // Build up and return the ontology.
    return done(undefined, {
      collections: modelMap,
      datastores: datastoreMap
    });
  });//</async.each>

};//</ definition of `orm.initialize` >
part.on('readable', function onBytesAvailable() {

  // Drain every byte currently available from this part of the multipart stream.
  var drained = '';
  var nextChunk;
  while ((nextChunk = part.read()) !== null) {
    drained += nextChunk;
  }

  // If anything was drained, this is just another piece of the text param.
  if (drained) {
    // FUTURE: make `maxFieldsSize` directly configurable via `options`
    self.form._fieldsSize += drained.length;
    if (self.form._fieldsSize > self.form.maxFieldsSize) {
      self.form._error(new Error('maxFieldsSize exceeded, received ' + self.form._fieldsSize + ' bytes of field data'));
      return;
    }
    value += decoder.write(drained);
    debug('Parser: Read a chunk of textparam through field `' + field + '`');
    return;
  }//•

  // Otherwise, IWMIH, nothing was drained, meaning we've now received all of
  // the bytes for this textparam.
  debug('Parser: Done reading textparam through field `' + field + '`');

  // Disregard this field if it is a problematic field name (e.g. "constructor"
  // or "__proto__") -- i.e. anything that is an own property of Object.prototype.
  // > Inspired by https://github.com/ljharb/qs/commit/dbf049d9fafb5c0716a3bcae80af6f4b8337b9f4
  // > And https://github.com/sailshq/lodash/commit/3f3d78dc3f756f4148a3832798e868ca7192c817#diff-6d186b954a58d5bb740f73d84fe39073R640
  if (Object.prototype.hasOwnProperty(field)) {
    textParamMetadata.done = true;// « see below for explanation of what this is doing
    return;
  }//•

  // Since this is a multipart HTTP request, there's no widely-adopted, trusted
  // convention for sending big, complex data structures as text parameters and
  // losslessly rehydrating them server-side.  So we check for a special
  // "X-JSON-MPU-Params" header (likely sent by Parasails/Cloud SDK): if it names
  // this field, we attempt to JSON.parse() the accumulated text -- tolerating
  // failure, since the header could hypothetically be sent for unrelated reasons.
  // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  // > Text parameters can (probably) not be cleanly encoded as JSON within the
  // > multipart upload HTTP spec itself.  For more discussion, see:
  // > • https://github.com/postmanlabs/postman-app-support/issues/3331
  // > • https://github.com/postmanlabs/postman-app-support/issues/1104
  // >
  // > Historical context:
  // > • https://github.com/sailshq/machine-as-action/commit/16062c568d0587ea0b228613a686071666b6e690
  // > • see GitHub comments on that commit for related changes in other packages
  // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  if (_.isFunction(self.req.get)) {
    var jsonMpuParamsHeader = self.req.get('X-JSON-MPU-Params');
    var fieldIsDeclaredAsJson = jsonMpuParamsHeader && _.isString(jsonMpuParamsHeader) && _.find(jsonMpuParamsHeader.split(','), function(paramName) { return paramName === field; });
    if (fieldIsDeclaredAsJson) {
      try {
        value = JSON.parse(value);
      } catch (unusedErr) { /* Silently ignore any JSON parsing errors (as explained above) */ }
    }//fi
  }//fi

  // If `req.body` already contains `field`, and this is the first duplicate value
  // (i.e. the second value to come in for this param) track it as a "multifield"
  // and build an array of param values.
  // (We have to do this in case the original value was an array itself -- we
  // wouldn't want to push subsequent values onto THAT array, y'know?)
  //
  // > FUTURE: remove this "multifield" behavior (see above for explanation)
  if (self.req.body[field]) {
    if (self.multifields[field]) {
      self.req.body[field].push(value);
    }
    else {
      debug('`' + field + '` param already exists in req.body, converting into a "multifield"...');
      self.req.body[field] = [self.req.body[field]];
      self.multifields[field] = true;
      self.req.body[field].push(value);
    }
  }
  else {
    self.req.body[field] = value;
  }

  // Mark that this textParam is done streaming in data in its
  // `textParamMetadata` object.  This is monitored and used so we know to wait
  // for any known textParams to finish streaming before we pass control to the app.
  textParamMetadata.done = true;
});
receiver__.on('writefile', function(fileStream) {
  // Look up the tracked upload whose stream just finished being written.
  var file = _.find(self._files, { stream: fileStream });
  if (!file) {
    // No upload is being tracked for this stream -- nothing to mark.
    // (Guard added: previously an unmatched stream caused an unhandled
    // TypeError inside this event handler.)
    return;
  }
  // Flag the upload as completely written out.
  file.status = 'finished';
});
// Determine which fields from the original schema are NOT present (by name)
// in the allowed schema.
var disallowed = _.reject(originalSchema, function(candidateField) {
  return _.find(allowedSchema, { name: candidateField.name });
});