// Clear the voice directory: remove every file in it, then acknowledge.
// FIX: the original ignored the readdir error and replied 'cleared' even
// when the directory could not be read; unlink failures were uncaught.
fs.readdir(voiceDir, function (err, files) {
  if (err) {
    // Could not even list the directory — report instead of pretending success.
    // NOTE(review): assumes an Express-style `res` — confirm framework.
    return res.status(500).send('failed to clear: ' + err.message);
  }
  var failed = 0;
  _.each(files, function (name) {
    try {
      fs.unlinkSync(voiceDir + '/' + name);
    } catch (unlinkErr) {
      // Best effort: keep deleting the rest, but remember the failure.
      failed++;
    }
  });
  if (failed > 0) {
    return res.status(500).send('failed to clear ' + failed + ' file(s)');
  }
  res.send('cleared');
});
// Register the task that builds per-folder package index files.
grunt.registerTask('genpackageindex', 'Generate package index files', function () {
  var taskOptions = grunt.task.current.options();
  // Walk every folder matched from this root and generate an index
  // file for each one of them.
  var matchedFolders = glob.sync(taskOptions.files);
  _.each(matchedFolders, checkFolder);
});
/**
 * Ensure every project carries the standard status flags, filling in
 * `false` for any of `deleted`, `unlisted`, `closed` that is undefined.
 * Mutates the projects in place and returns the same collection.
 */
Project.applyDefaults = function (projects) {
  var statusFlags = { deleted: false, unlisted: false, closed: false };
  _.each(projects, function (project) {
    _.defaults(project, statusFlags);
  });
  return projects;
};
/**
 * Build the shared gulp configuration object for this project.
 *
 * Merges user-supplied `options` over the built-in defaults (paths,
 * postcss processor chain, html/image optimization settings, vulcanize
 * settings), attaches dist/tmp path helpers, and loads every task module
 * from the local `tasks` directory, handing each one (gulp, config).
 *
 * @param {object} gulp - The gulp instance tasks are registered on.
 * @param {object} options - User overrides, deep-merged over the defaults.
 * @returns {object} The merged config, including findPath/findPathTmp helpers.
 */
module.exports = function(gulp, options) {
  'use strict';
  var _ = require('underscore');
  // postcss plugins
  var path = require('path');
  var customMedia = require('postcss-custom-media');
  var nested = require('postcss-nested');
  var autoprefixer = require('autoprefixer');
  var extend = require('deep-extend');
  var atImport = require('postcss-import');

  var defaults = {
    projectInfo: {
      // Matches version strings to rewrite during releases.
      versionRegex: /(version\s?[=:]\s?)["']\d.{3,}['"]/g, // version: 0.0.0 || version = 0.0.0
      versionFiles: [],
    },
    paths: {
      tmp: '.tmp',
      app: 'app',
      dist: 'dist',
      test: 'test',
      translations: ''
    },
    // Order matters: imports must be inlined before the other transforms run.
    postcssProcessors: [
      atImport,
      customMedia,
      nested,
      autoprefixer({ browsers: [
        'ie >= 10',
        'ie_mob >= 10',
        'ff >= 30',
        'chrome >= 34',
        'safari >= 7',
        'opera >= 23',
        'ios >= 7',
        'android >= 4.4',
        'bb >= 10'
      ] })
    ],
    optimize: {
      html: { quotes: true, empty: true, spare: true, loose: true },
      image: {
        progressive: true,
        interlaced: true,
        svgoPlugins: [ { collapseGroups: false } ]
      }
    },
    vulcanize: {
      implicitStrip: true,
      stripComments: true,
      inlineCss: true,
      inlineScripts: true
    }
  };

  // Merge user settings with default config (deep merge; user wins).
  var config = extend({}, defaults, options);

  // Get the correct dist path (dist root when no subpath is given).
  config.findPath = function(subpath) {
    return !subpath ? config.paths.dist : path.join(config.paths.dist, subpath);
  };

  // Get the correct tmp path (tmp root when no subpath is given).
  config.findPathTmp = function(subpath) {
    return !subpath ? config.paths.tmp : path.join(config.paths.tmp, subpath);
  };

  // Load tasks for web-component-tester
  // Adds tasks for `gulp test:local` and `gulp test:remote`
  // require('web-component-tester').gulp.init(gulp);

  // Load custom tasks from the `tasks` directory; a failure here is logged
  // but deliberately non-fatal so the config is still returned.
  try {
    var tasks = require('require-dir')('./tasks');
    _.each(tasks, function(task) {
      task(gulp, config);
    });
  } catch (err) {
    console.log('Error: ', err);
  }

  return config;
};
/**
 * Index an array from `app.config` into `result` as a lookup object.
 * Reads `app.config[selfName]` and stores each entry on
 * `result[resultName]`, keyed by that entry's `identifier` property.
 * NOTE(review): relies on outer-scope `app` and `result` bindings.
 */
var applyArrayInObject = function (selfName, resultName, identifier) {
  result[resultName] = {};
  var lookup = result[resultName];
  _.each(app.config[selfName], function (entry) {
    lookup[entry[identifier]] = entry;
  });
};
exports.initialize = function(pathsObj){ _.each(pathsObj, function(path, type) { exports.paths[type] = path; }); };
// Launch a single mongod for `dbPath` on `port`, then block this fiber until
// it is listening and (unless oplog is disabled) the replica set is either
// ready or ready to be initiated. Registers a stop handle in `subHandles`
// and wipes stale replSet state when the port has changed since last run.
// NOTE(review): relies on closure variables from the enclosing launchMongo
// (options, stopped, mongod_path, replSetName, subHandles, handle,
// stopPromise, noOplog, onExit) — confirm against the outer function.
var launchOneMongoAndWaitForReadyForInitiate = function (dbPath, port,
                                                         portFile) {
  files.mkdir_p(dbPath, 0o755);
  var proc = null;

  if (options.allowKilling) {
    findMongoAndKillItDead(port, dbPath);
  }

  if (options.multiple) {
    // This is only for testing, so we're OK with incurring the replset
    // setup on each startup.
    files.rm_recursive(dbPath);
    files.mkdir_p(dbPath, 0o755);
  } else if (portFile) {
    var portFileExists = false;
    var matchingPortFileExists = false;
    try {
      matchingPortFileExists = +(files.readFile(portFile)) === port;
      portFileExists = true;
    } catch (e) {
      // A missing port file just means a fresh DB; anything else is fatal.
      if (!e || e.code !== 'ENOENT') {
        throw e;
      }
    }
    // If this is the first time we're using this DB, or we changed port since
    // the last time, then we want to destroy any existing replSet
    // configuration and create a new one. First we delete the "local"
    // database if it exists. (It's a pain and slow to change the port in an
    // existing replSet configuration. It's also a little slow to initiate a
    // new replSet, thus the attempt to not do it unless the port changes.)
    //
    // In the "multiple" case, we just wipe out the entire database and incur
    // the cost, because this won't affect normal users running meteor.
    if (!matchingPortFileExists) {
      // Delete the port file if it exists, so we don't mistakenly believe
      // that the DB is still configured.
      if (portFileExists) {
        files.unlink(portFile);
      }
      try {
        var dbFiles = files.readdir(dbPath);
      } catch (e) {
        if (!e || e.code !== 'ENOENT') {
          throw e;
        }
      }
      // The "local" database files hold the old replSet configuration.
      _.each(dbFiles, function (dbFile) {
        if (/^local\./.test(dbFile)) {
          files.unlink(files.pathJoin(dbPath, dbFile));
        }
      });
    }
  }

  // Let's not actually start a process if we yielded (eg during
  // findMongoAndKillItDead) and we decided to stop in the middle (eg, because
  // we're in multiple mode and another process exited).
  if (stopped) {
    return;
  }

  proc = spawnMongod(mongod_path, port, dbPath, replSetName);

  // Kill this mongod (at most once) and detach its exit handler so the
  // intentional shutdown is not treated as an unexpected exit.
  function stop() {
    if (proc) {
      proc.removeListener('exit', procExitHandler);
      proc.kill('SIGINT');
      proc = null;
    }
  }
  require("../tool-env/cleanup.js").onExit(stop);
  subHandles.push({ stop });

  var procExitHandler = fiberHelpers.bindEnvironment(function (code, signal) {
    // Defang subHandle.stop().
    proc = null;

    // Kill any other processes too. This will also remove
    // procExitHandler from the other processes, so onExit will only be called
    // once.
    handle.stop();

    // Invoke the outer onExit callback.
    onExit(code, signal, stderrOutput, detectedErrors);
  });
  proc.on('exit', procExitHandler);

  var listening = false;
  var replSetReadyToBeInitiated = false;
  var replSetReady = false;

  var maybeReadyToTalk;
  // Resolves once mongod is listening AND (if oplog is enabled) the replSet
  // is usable; stdout watching stops at that point.
  var readyToTalkPromise = new Promise(function (resolve) {
    maybeReadyToTalk = function () {
      if (resolve &&
          listening &&
          (noOplog || replSetReadyToBeInitiated || replSetReady)) {
        proc.stdout.removeListener('data', stdoutOnData);
        resolve();
        resolve = null;
      }
    };
  });

  // Either we become ready to talk, or the whole launch is stopped.
  var stopOrReadyPromise = Promise.race([
    stopPromise,
    readyToTalkPromise,
  ]);

  var detectedErrors = {};
  // Scrape mongod's stdout for readiness and error markers.
  var stdoutOnData = fiberHelpers.bindEnvironment(function (data) {
    // note: don't use "else ifs" in this, because 'data' can have multiple
    // lines
    if (/\[initandlisten\] Did not find local replica set configuration document at startup/.test(data) ||
        /\[.*\] Locally stored replica set configuration does not have a valid entry for the current node/.test(data)) {
      replSetReadyToBeInitiated = true;
      maybeReadyToTalk();
    }

    if (/ \[.*\] waiting for connections on port/.test(data)) {
      listening = true;
      maybeReadyToTalk();
    }

    if (/ \[rsSync-0\] transition to primary complete/.test(data)) {
      replSetReady = true;
      maybeReadyToTalk();
    }

    if (/Insufficient free space/.test(data)) {
      detectedErrors.freeSpace = true;
    }

    // Running against a old mmapv1 engine, probably from pre-mongo-3.2 Meteor
    if (/created by the 'mmapv1' storage engine, so setting the active storage engine to 'mmapv1'/.test(data)) {
      Console.warn();
      Console.warn('Your development database is using mmapv1, ' +
        'the old, pre-MongoDB 3.0 database engine. ' +
        'You should consider upgrading to Wired Tiger, the new engine. ' +
        'The easiest way to do so in development is to run ' +
        Console.command('meteor reset') + '. ' +
        "If you'd like to migrate your database, please consult " +
        Console.url('https://docs.mongodb.org/v3.0/release-notes/3.0-upgrade/'))
      Console.warn();
    }

    if (/Invalid or no user locale set/.test(data)) {
      detectedErrors.badLocale = true;
    }
  });
  proc.stdout.setEncoding('utf8');
  proc.stdout.on('data', stdoutOnData);

  var stderrOutput = '';
  proc.stderr.setEncoding('utf8');
  proc.stderr.on('data', function (data) {
    stderrOutput += data;
  });

  // Block this fiber until mongod is ready to talk or launch was stopped.
  stopOrReadyPromise.await();
};
}, function(err, resp, body) { _.each(body.results, function (meetup) { rsvp_to_event(meetup.id) }) });
exports.restful = function(base, resource, id, fmt){ var format = function(path){ if (fmt) path.push(fmt) return path } var restapi = { LIST : { method: 'GET', path: format([(base || ''), resource]).join('/') }, CREATE : { method: 'POST', path: format([(base || ''), resource]).join('/') }, NEW : { method: 'GET', path: format([(base || ''), resource, 'new']).join('/') }, EDIT : { method: 'GET', path: format([(base || ''), resource, (id || '{0}'), 'edit']).join('/') }, SHOW : { method: 'GET', path: format([(base|| ''), resource, (id || '{0}')]).join('/') }, UPDATE : { method: 'PUT', path: format([(base|| ''), resource, (id || '{0}')]).join('/') }, DESTROY : { method: 'DELETE', path: format([(base|| ''), resource]).join('/') } } var content_type = 'application/json' if (fmt && fmt === '.xml') content_type = 'text/xml' // <-- is that right? _.each(_.keys(restapi), function(k){ var apicall = restapi[k]; apicall['headers'] = { 'Accept' : '*/*', 'Content-Type' : content_type } if (apicall.method !== 'POST' && apicall.method === 'PUT') { var headers = apicall['headers'] delete headers['Content-Type'] } }) return { actions : restapi, required: '', optional: '' } }
/**
 * Create the navigation sidebar data.
 * Populates the shared `navigationMaster` object: pushes a link for each
 * doclet into its section, deduplicating by longname across sections, then
 * collects every non-empty section (except the index) into
 * `nav.topLevelNav`.
 * @param {object} members The members that will be used to create the sidebar.
 * @param {array<object>} members.classes
 * @param {array<object>} members.externals
 * @param {array<object>} members.globals
 * @param {array<object>} members.mixins
 * @param {array<object>} members.modules
 * @param {array<object>} members.namespaces
 * @param {array<object>} members.tutorials
 * @param {array<object>} members.events
 * Note: despite the historical `@return {string}` doc, this function does
 * not return the HTML — it mutates `navigationMaster` in place.
 */
function buildNav( members ) {
    var seen = {};
    var nav = navigationMaster;

    // Push a link for each item not already seen, and mark every longname
    // as seen so later sections don't duplicate entries.
    function addMembers( items, navEntry, makeLink ) {
        items.forEach( function ( item ) {
            if ( !hasOwnProp.call( seen, item.longname ) ) {
                navEntry.members.push( makeLink( item ) );
            }
            seen[item.longname] = true;
        } );
    }

    function defaultLink( item ) {
        return linkto( item.longname, item.name );
    }

    addMembers( members.modules, nav.module, defaultLink );
    addMembers( members.externals, nav.external, function ( e ) {
        // External names are stored quoted; strip the quotes for display.
        return linkto( e.longname, e.name.replace( /(^"|"$)/g, '' ) );
    } );
    addMembers( members.classes, nav.class, defaultLink );
    addMembers( members.events, nav.event, defaultLink );
    addMembers( members.namespaces, nav.namespace, defaultLink );
    addMembers( members.mixins, nav.mixin, defaultLink );

    // Tutorials are neither deduplicated nor tracked in `seen`.
    members.tutorials.forEach( function ( t ) {
        nav.tutorial.members.push( tutoriallink( t.name ) );
    } );

    // Globals: typedefs are recorded as seen but never linked.
    members.globals.forEach( function ( g ) {
        if ( g.kind !== 'typedef' && !hasOwnProp.call( seen, g.longname ) ) {
            nav.global.members.push( linkto( g.longname, g.name ) );
        }
        seen[g.longname] = true;
    } );

    // Collect every populated section (except the index) for the top level.
    var topLevelNav = [];
    Object.keys( nav ).forEach( function ( name ) {
        var entry = nav[name];
        if ( entry.members.length > 0 && name !== "index" ) {
            topLevelNav.push( {
                title : entry.title,
                link : entry.link,
                members : entry.members
            } );
        }
    } );
    nav.topLevelNav = topLevelNav;
}
/**
 * Create a new user. Validates the payload, checks the caller's
 * authorization and role memberships, then delegates to the auth manager.
 *
 * Responds via ctx:
 *  - 400 on schema validation failure, invalid email, or empty roles
 *  - 401 when the caller lacks the 'users' create privilege or a role
 *  - 401 {exists:true} when the username is already taken
 *  - 500 on other persistence errors
 *
 * @param {object} ctx - Request context (req/session, responders).
 * @param {object} fields - Raw user fields from the request body.
 */
AuthApi.prototype.create = function (ctx, fields) {
  var self = this;
  var logger = this.server.getLogger();

  var user = {
    username: fields.username,
    firstName: fields.firstName,
    lastName: fields.lastName,
    password: fields.password,
    roles: fields.roles
  };

  var validator = new validation.JSONValidation();
  var validationResult = validator.validate(user, {
    type: 'object',
    required: true,
    properties: {
      username: { type: 'string', required: true },
      password: { type: 'string', required: true },
      firstName: { type: 'string', required: true },
      lastName: { type: 'string', required: true },
      roles: {
        type: 'array',
        required: true,
        items: { type: 'string' },
        // Only roles configured on the server are acceptable.
        enum: _.pluck(self.server.app.config.roles, 'name'),
        uniqueItems: true
      }
    }
  });
  if (!validationResult.ok) {
    var validationErrors = validationResult.path + ': ' + validationResult.errors.join(' - ');
    return ctx.error(400, 'validation error on user object\n' + validationErrors);
  }

  // NOTE(review): this pattern only accepts 2-4 letter TLDs — left as-is.
  if (!user.username.match(/^[A-Z0-9._%+-]+@(?:[A-Z0-9\-]+\.)+[A-Z]{2,4}$/i))
    return ctx.error(400, 'username is not a valid email');

  var authMngr = require('../cms/authMngr.js')(this.server);

  var createUser = function () {
    authMngr.create(user, function (err, result) {
      if (err) {
        if (err.message === 'username already exists') {
          return ctx.status(401).send({ exists: true });
        } else {
          logger.error(err);
        }
        return ctx.error(500, err);
      }
      if (result) {
        ctx.json(result);
      } else {
        ctx.json(null);
      }
    });
  };

  if (!authMngr.isAuthorized(ctx.req.session.user, 'users', 'C'))
    return ctx.error(401, 'Access Denied');

  if (user.roles.length === 0)
    return ctx.error(400, 'at least one role is required');

  // BUG FIX: the original used `return ctx.error(...)` inside an _.each
  // iteratee, which only exits the iteratee — createUser() still ran after
  // the 401 was sent (unauthorized create + a second response). Find the
  // first denied role and bail out before creating anything.
  var deniedRole = _.find(user.roles, function (role) {
    return !authMngr.isInRole(ctx.req.session.user, role);
  });
  if (deniedRole !== undefined) {
    return ctx.error(401, 'access denied for role "' + deniedRole + '"');
  }

  createUser();
};
cleanup: function () { // remove any views we already have bound _.each(this.lmViews, function(lm) {lm.remove()}); this.lmViews = []; }
// Mark every tile inside each player's zone as occupied on the grid.
_.each(players, function (currentPlayer) {
  var zoneTiles = currentPlayer.getZone();
  _.each(zoneTiles, function (coords) {
    // coords looks like a [row, col] pair — presumably within grid bounds;
    // verify upstream (TODO confirm).
    grid[coords[0]][coords[1]] = 1;
  });
});
// Data-driven model test suite: given a `spec` describing an XM record type,
// register mocha tests covering CRUD, smoke UI CRUD, model configuration
// (required fields, lockability, inheritance, keys, attributes), privilege
// declaration/enforcement, and the backing collection/cache.
describe(spec.recordType, function () {
  // A random value lets update tests prove the field actually changed.
  if (_.isString(spec.updatableField)) {
    spec.updateHash = {};
    spec.updateHash[spec.updatableField] = "Test" + Math.random();
  }
  //
  // Run CRUD model tests
  //
  if (!spec.skipCrud) {
    crud.runAllCrud(spec);
  } else {
    // even if we skip CRUD we have to create a model
    it('can be loaded with a zombie session', function (done) {
      this.timeout(40 * 1000);
      zombieAuth.loadApp({callback: done, verbose: false /* data.verbose */});
    });
    it('can be created', function () {
      // Record types look like "XM.Foo"; strip the "XM." prefix.
      spec.model = new XM[spec.recordType.substring(3)]();
    });
  }
  //
  // Smoke Crud
  //
  if (!spec.skipSmoke) {
    smoke.runUICrud(spec);
  }
  if (!spec.skipModelConfig) {
    //
    // Verify required fields
    //
    if (spec.requiredAttributes) {
      _.each(spec.requiredAttributes, function (attr) {
        it("the " + attr + " attribute is required", function () {
          assert.include(spec.model.requiredAttributes, attr);
        });
      });
    }
    //
    // Verify lockability
    //
    it(spec.isLockable ? "is lockable" : "is not lockable", function () {
      assert.equal(spec.isLockable, spec.model.lockable);
    });
    //
    // Verify inheritance
    //
    if (spec.instanceOf === "XM.Document") {
      it("inherits from XM.Document", function () {
        assert.isTrue(spec.model instanceof XM.Document);
      });
    } else if (spec.instanceOf === "XM.Model") {
      it("inherits from XM.Model but not XM.Document", function () {
        assert.isTrue(spec.model instanceof XM.Model);
        assert.isFalse(spec.model instanceof XM.Document);
      });
    } else {
      // Specs must declare their inheritance explicitly.
      it("has its inheritance defined in the test spec", function () {
        assert.fail();
      });
    }
    //
    // Verify ID attribute
    //
    if (spec.idAttribute) {
      it("has " + spec.idAttribute + " as its idAttribute", function () {
        assert.equal(spec.idAttribute, spec.model.idAttribute);
      });
    } else {
      it("has its id attribute defined in the test spec", function () {
        assert.fail();
      });
    }
    //
    // Verify Document Key
    //
    if (spec.documentKey) {
      it("has " + spec.documentKey + " as its documentKey", function () {
        assert.equal(spec.documentKey, spec.model.documentKey);
      });
    }
    //
    // Make sure we're testing the enforceUpperCase (the asserts themselves
    // are in CRUD)
    //
    it((spec.enforceUpperKey ? "Enforces" : "Does not enforce") + " uppercasing the key", function () {
      assert.equal(spec.model.enforceUpperKey, spec.enforceUpperKey);
    });
    if (!_.isBoolean(spec.enforceUpperKey)) {
      it("has its enforceUpperKey convention defined in the test spec", function () {
        assert.fail();
      });
    }
    //
    // Verify attributes
    //
    _.each(spec.attributes, function (attr) {
      it("contains the " + attr + " attribute", function () {
        assert.include(spec.model.getAttributeNames(), attr);
      });
    });
    if (!spec.attributes || spec.attributes.length === 0) {
      it("has some attributes defined in the test spec", function () {
        assert.fail();
      });
    }
    //
    // Verify privileges are declared correctly by the extensions
    //
    _.each(spec.privileges, function (priv) {
      if (typeof priv === 'string') {
        _.each(spec.extensions, function (extension) {
          it("has privilege " + priv + " declared by the " + extension + " extension", function () {
            assert.isDefined(_.findWhere(XT.session.relevantPrivileges, {privilege: priv, module: extension}));
          });
        });
        /* XXX this could get tripped up by non-core extensions
        it("has privilege " + priv + " not declared by any other extensions", function () {
          var matchedPriv = _.filter(XT.session.relevantPrivileges, function (sessionPriv) {
            return sessionPriv.privilege === priv && !_.contains(spec.extensions, sessionPriv.module);
          });
          assert.equal(0, matchedPriv.length);
        });
        */
        //
        // Make sure the privilege is translated
        //
        it("has privilege " + priv + " that is translated in the strings file", function () {
          var privLoc = "_" + XT.String.camelize(priv);
          assert.notEqual(XT.String.loc(privLoc), privLoc);
        });
      }
    });
    //
    // Verify Privileges
    //
    _.each(spec.privileges, function (priv, key) {
      // Map each spec key to the model class methods it gates.
      var methodMap = {
        createUpdateDelete: ["canCreate", "canUpdate", "canDelete"],
        createUpdate: ["canCreate", "canUpdate"],
        create: ["canCreate"],
        read: ["canRead"],
        update: ["canUpdate"],
        delete: ["canDelete"]
      },
      pertinentMethods = methodMap[key];
      var updatePriv = spec.privileges.update || spec.privileges.createUpdate || spec.privileges.createUpdateDelete;
      it("needs " + priv + " privilege to perform action " + key, function () {
        var Klass = XT.getObjectByName(spec.recordType);
        if (_.isString(priv)) {
          assert.isDefined(pertinentMethods); // make sure we're testing for the priv
          // Toggle the privilege off and on, verifying the can* methods follow.
          XT.session.privileges.attributes[priv] = false;
          if (key === "read" && updatePriv) {
            // update privs are sufficient for read, so we have to toggle those too
            XT.session.privileges.attributes[updatePriv] = false;
          }
          _.each(pertinentMethods, function (pertinentMethod) {
            assert.isFalse(Klass[pertinentMethod]());
          });
          XT.session.privileges.attributes[priv] = true;
          if (key === "read" && updatePriv) {
            // update privs are sufficient for read, so we have to toggle those too
            XT.session.privileges.attributes[updatePriv] = true;
          }
          _.each(pertinentMethods, function (pertinentMethod) {
            assert.isTrue(Klass[pertinentMethod]());
          });
        } else if (_.isBoolean(priv)) {
          _.each(pertinentMethods, function (pertinentMethod) {
            assert.equal(Klass[pertinentMethod](), priv);
          });
        } else {
          // NOTE(review): this it() is registered from inside an already
          // running it() — mocha will not execute it; an assert.fail() was
          // probably intended. Left unchanged.
          it("has privilege " + priv + " that's a string or boolean in the test spec", function () {
            assert.fail();
          });
        }
      });
    });
    //
    // Test that the collection exists
    //
    if (spec.collectionType) {
      it("backs the " + spec.collectionType + " collection", function () {
        var Collection = XT.getObjectByName(spec.collectionType),
          modelPrototype = Collection.prototype.model.prototype,
          editableModel = modelPrototype.editableModel || modelPrototype.recordType;
        assert.isFunction(Collection);
        assert.equal(editableModel, spec.recordType);
      });
    } else if (spec.collectionType === null) {
      // TODO: loop through the existing collections and make sure that
      // none are backed by spec.recordType
    } else {
      // NOTE(review): "colletion" typo is in the runtime string; left as-is.
      it("has no colletion specified in the test spec", function () {
        assert.fail();
      });
    }
    //
    // Test that the cache exists
    //
    if (spec.cacheName) {
      it("is cached as " + spec.cacheName, function () {
        var cache = XT.getObjectByName(spec.cacheName);
        assert.isObject(cache);
        assert.equal(cache.model.prototype.recordType, spec.recordType);
      });
    } else if (spec.cacheName === null) {
      /* TODO: probably the best thing to do is to loop through the caches
         and make sure that none of them are backed by spec.recordType
      it("is not cached", function () {
      });
      */
    } else {
      it("has a cache (or null for no cache) specified in the test spec", function () {
        assert.fail();
      });
    }
  }
  // TODO: verify that the cache is made available by certain extensions and not others
  // TODO: verify that the list is made available by certain extensions and not others
  if (spec.additionalTests) {
    spec.additionalTests();
  }
});
// Run the bundled server in production mode.
process.env.NODE_ENV = 'production';

// Map from load path to its parsed source map.
var parsedSourceMaps = {};

// Read all the source maps into memory once.
_.each(serverJson.load, function (fileInfo) {
  if (fileInfo.sourceMap) {
    var rawSourceMap = fs.readFileSync(
      path.resolve(serverDir, fileInfo.sourceMap), 'utf8');
    // Parse the source map only once, not each time it's needed. Also remove
    // the anti-XSSI header if it's there.
    var parsedSourceMap = JSON.parse(rawSourceMap.replace(/^\)\]\}'/, ''));
    // source-map-support doesn't ever look at the sourcesContent field, so
    // there's no point in keeping it in memory.
    delete parsedSourceMap.sourcesContent;
    // FIX: removed a dead `var url;` declaration that was never read.
    if (fileInfo.sourceMapRoot) {
      // Add the specified root to any root that may be in the file.
      parsedSourceMap.sourceRoot = path.join(
        fileInfo.sourceMapRoot, parsedSourceMap.sourceRoot || '');
    }
    parsedSourceMaps[fileInfo.path] = parsedSourceMap;
  }
});

// Lookup used by source-map-support: return the parsed map for a load path,
// or null when none was bundled.
var retrieveSourceMap = function (pathForSourceMap) {
  if (_.has(parsedSourceMaps, pathForSourceMap))
    return { map: parsedSourceMaps[pathForSourceMap] };
  return null;
};
// Tests for eValidator's "Email" assert: valid addresses must produce a
// null violation list; invalid addresses must produce a non-empty array.
describe('Assert: Email validations', function () {
  it('Valid email on property should not add violation', function (done) {
    var constraintCollection = eValidator.Assert({
      email_address: [{"assert": "Email"}]
    });
    // FIX: violationList was an implicit global; scope it with var.
    var violationList = constraintCollection.perform.validate({email_address: '*****@*****.**'});
    assert.ok(_.isNull(violationList));
    done();
  });

  // FIX: the fixture arrays were implicit globals; declare them with var.
  var validEmailArray = [
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**'
  ];
  _.each(validEmailArray, function (validEmail) {
    it('Valid email [' + validEmail + '] on property should not add violation', function (done) {
      var constraintCollection = eValidator.Assert({
        email_address: [{"assert": "Email"}]
      });
      var violationList = constraintCollection.perform.validate({email_address: validEmail});
      assert.ok(_.isNull(violationList));
      done();
    });
  });

  var invalidEmailArray = [
    'plainaddress',
    '#@%^%#$@#$@#.com',
    '@example.com',
    'Joe Smith <*****@*****.**>',
    'email.example.com',
    'email@example@example.com',
    '*****@*****.**',
    '*****@*****.**',
    '*****@*****.**',
    'email@example.com (Joe Smith)',
    'email@example',
    'email@111.222.333.44444',
    '*****@*****.**',
    '*****@*****.**'
  ];
  _.each(invalidEmailArray, function (invalidEmail) {
    // FIX: test title wrongly said "Valid email" for the invalid cases.
    it('Invalid email [' + invalidEmail + '] on property should add violation', function (done) {
      var constraintCollection = eValidator.Assert({
        email_address: [{"assert": "Email"}]
      });
      var violationList = constraintCollection.perform.validate({email_address: invalidEmail});
      assert.ok(_.isArray(violationList));
      assert.ok(0 < violationList.length);
      done();
    });
  });
});
// Boot the bundled Meteor server program inside a fiber: load every file
// listed in serverJson.load (wrapped so each sees its own Npm/Assets),
// run the registered startup hooks, then locate and invoke the single
// main() function, exiting with its return code unless it asks to stay
// alive by returning 'DAEMON'.
Fiber(function () {
  _.each(serverJson.load, function (fileInfo) {
    var code = fs.readFileSync(path.resolve(serverDir, fileInfo.path));

    // Per-file Npm facade: resolve from the file's bundled node_modules
    // first, falling back to the tool's own require.
    var Npm = {
      require: function (name) {
        if (! fileInfo.node_modules) {
          return require(name);
        }
        var nodeModuleDir = path.resolve(serverDir, fileInfo.node_modules, name);
        if (fs.existsSync(nodeModuleDir)) {
          return require(nodeModuleDir);
        }
        try {
          return require(name);
        } catch (e) {
          // Try to guess the package name so we can print a nice
          // error message
          var filePathParts = fileInfo.path.split(path.sep);
          var packageName = filePathParts[1].replace(/\.js$/, '');
          // XXX better message
          throw new Error(
            "Can't find npm module '" + name + "'. Did you forget to call 'Npm.depends' in package.js " +
            "within the '" + packageName + "' package?");
        }
      }
    };

    // Read a bundled asset; synchronous (via Future) when no callback given.
    var getAsset = function (assetPath, encoding, callback) {
      var fut;
      if (! callback) {
        fut = new Future();
        callback = fut.resolver();
      }
      // This assumes that we've already loaded the meteor package, so meteor
      // itself (and weird special cases like js-analyze) can't call
      // Assets.get*. (We could change this function so that it doesn't call
      // bindEnvironment if you don't pass a callback if we need to.)
      var _callback = Package.meteor.Meteor.bindEnvironment(function (err, result) {
        if (result && ! encoding)
          // Sadly, this copies in Node 0.10.
          result = new Uint8Array(result);
        callback(err, result);
      }, function (e) {
        console.log("Exception in callback of getAsset", e.stack);
      });
      if (!fileInfo.assets || !_.has(fileInfo.assets, assetPath)) {
        _callback(new Error("Unknown asset: " + assetPath));
      } else {
        var filePath = path.join(serverDir, fileInfo.assets[assetPath]);
        fs.readFile(filePath, encoding, _callback);
      }
      if (fut)
        return fut.wait();
    };

    var Assets = {
      getText: function (assetPath, callback) {
        return getAsset(assetPath, "utf8", callback);
      },
      getBinary: function (assetPath, callback) {
        return getAsset(assetPath, undefined, callback);
      }
    };

    // \n is necessary in case final line is a //-comment
    var wrapped = "(function(Npm, Assets){" + code + "\n})";
    var func = require('vm').runInThisContext(wrapped, fileInfo.path, true);
    func.call(global, Npm, Assets); // Coffeescript
  });

  // run the user startup hooks.
  _.each(__meteor_bootstrap__.startup_hooks, function (x) { x(); });

  // find and run main()
  // XXX hack. we should know the package that contains main.
  var mains = [];
  var globalMain;
  if ('main' in global) {
    mains.push(main);
    globalMain = main;
  }
  // Collect each package-level main that isn't just the global one again.
  _.each(Package, function (p, n) {
    if ('main' in p && p.main !== globalMain) {
      mains.push(p.main);
    }
  });
  if (! mains.length) {
    process.stderr.write("Program has no main() function.\n");
    process.exit(1);
  }
  if (mains.length > 1) {
    process.stderr.write("Program has more than one main() function?\n");
    process.exit(1);
  }
  var exitCode = mains[0].call({}, process.argv.slice(3));
  // XXX hack, needs a better way to keep alive
  if (exitCode !== 'DAEMON')
    process.exit(exitCode);
}).run();
/**
 * Record and broadcast a new event: log it at debug level, then hand it
 * to every registered listener in order.
 * @param {object} event - The event payload to dispatch.
 */
EventStore.prototype.push = function (event) {
  this.Log.debug({ event: event }, "New event");
  var notify = function (listener) {
    listener(event);
  };
  _.each(this.listeners, notify);
};
/**
 * Launch mongod (or the fake-mongod test stub) for a Meteor project,
 * optionally as a 3-member replica set, and wait until it is ready.
 *
 * options: onExit, onStopped, multiple, allowKilling, port, projectLocalDir.
 * Returns a handle with a stop() method, or null if stopped during launch.
 * NOTE(review): runs inside a fiber — `.await()` on Promises comes from
 * Meteor's fiber-aware Promise, not standard JS.
 */
var launchMongo = function (options) {
  var onExit = options.onExit || function () {};
  var noOplog = false;
  var mongod_path = files.pathJoin(
    files.getDevBundle(), 'mongodb', 'bin', 'mongod'
  );
  var replSetName = 'meteor';

  // Automated testing: If this is set, instead of starting mongod, we
  // start our stub (fake-mongod) which can then be remote-controlled
  // by the test.
  if (process.env.METEOR_TEST_FAKE_MONGOD_CONTROL_PORT) {
    if (options.multiple) {
      throw Error("Can't specify multiple with fake mongod");
    }
    var fakeMongodCommand =
      process.platform === "win32" ? "fake-mongod.bat" : "fake-mongod";
    mongod_path = files.pathJoin(
      files.getCurrentToolsDir(), 'tools',
      'tests', 'fake-mongod', fakeMongodCommand);
    // oplog support requires sending admin commands to mongod, so
    // it'd be hard to make fake-mongod support it.
    noOplog = true;
  }

  // One sub-handle per spawned process; all are stopped together.
  var subHandles = [];
  var stopped = false;
  var handle = {};

  // stopPromise rejects with StoppedDuringLaunch when handle.stop() is
  // called; racing against it lets blocking waits below abort promptly.
  var stopPromise = new Promise((resolve, reject) => {
    handle.stop = function () {
      if (stopped) {
        return;
      }
      stopped = true;
      _.each(subHandles, function (handle) {
        handle.stop();
      });
      if (options.onStopped) {
        options.onStopped();
      }
      reject(new StoppedDuringLaunch);
    };
  });

  // Call a node-style async method and block (fiber-yield) until it
  // completes or the launch is stopped, whichever happens first.
  var yieldingMethod = function (object, methodName, ...args) {
    return Promise.race([
      stopPromise,
      new Promise((resolve, reject) => {
        object[methodName](...args, (err, res) => {
          err ? reject(err) : resolve(res);
        });
      })
    ]).await();
  };

  // Spawn one mongod on `port` using `dbPath`, and block until its stdout
  // says it is listening and ready for replSetInitiate (or launch stops).
  var launchOneMongoAndWaitForReadyForInitiate = function (dbPath, port,
                                                           portFile) {
    files.mkdir_p(dbPath, 0o755);
    var proc = null;
    if (options.allowKilling) {
      findMongoAndKillItDead(port, dbPath);
    }
    if (options.multiple) {
      // This is only for testing, so we're OK with incurring the replset
      // setup on each startup.
      files.rm_recursive(dbPath);
      files.mkdir_p(dbPath, 0o755);
    } else if (portFile) {
      var portFileExists = false;
      var matchingPortFileExists = false;
      try {
        matchingPortFileExists = +(files.readFile(portFile)) === port;
        portFileExists = true;
      } catch (e) {
        if (!e || e.code !== 'ENOENT') {
          throw e;
        }
      }
      // If this is the first time we're using this DB, or we changed port since
      // the last time, then we want to destroy any existing replSet
      // configuration and create a new one. First we delete the "local"
      // database if it exists. (It's a pain and slow to change the port in an
      // existing replSet configuration. It's also a little slow to initiate a
      // new replSet, thus the attempt to not do it unless the port changes.)
      //
      // In the "multiple" case, we just wipe out the entire database and incur
      // the cost, because this won't affect normal users running meteor.
      if (!matchingPortFileExists) {
        // Delete the port file if it exists, so we don't mistakenly believe
        // that the DB is still configured.
        if (portFileExists) {
          files.unlink(portFile);
        }
        try {
          var dbFiles = files.readdir(dbPath);
        } catch (e) {
          if (!e || e.code !== 'ENOENT') {
            throw e;
          }
        }
        // Drop mongod's "local" database files (replSet state lives there).
        _.each(dbFiles, function (dbFile) {
          if (/^local\./.test(dbFile)) {
            files.unlink(files.pathJoin(dbPath, dbFile));
          }
        });
      }
    }

    // Let's not actually start a process if we yielded (eg during
    // findMongoAndKillItDead) and we decided to stop in the middle (eg, because
    // we're in multiple mode and another process exited).
    if (stopped) {
      return;
    }

    proc = spawnMongod(mongod_path, port, dbPath, replSetName);

    // Kill this process (idempotent; proc is nulled to defang repeats).
    function stop() {
      if (proc) {
        // procExitHandler is hoisted (function-scoped var); it is assigned
        // below before stop() can ever run.
        proc.removeListener('exit', procExitHandler);
        proc.kill('SIGINT');
        proc = null;
      }
    }
    require("../tool-env/cleanup.js").onExit(stop);
    subHandles.push({ stop });

    var procExitHandler = fiberHelpers.bindEnvironment(function (code, signal) {
      // Defang subHandle.stop().
      proc = null;
      // Kill any other processes too. This will also remove
      // procExitHandler from the other processes, so onExit will only be called
      // once.
      handle.stop();
      // Invoke the outer onExit callback.
      onExit(code, signal, stderrOutput, detectedErrors);
    });
    proc.on('exit', procExitHandler);

    // Readiness flags flipped by scraping mongod's stdout below.
    var listening = false;
    var replSetReadyToBeInitiated = false;
    var replSetReady = false;

    var maybeReadyToTalk;
    // Resolves once mongod is listening AND either oplog is disabled or the
    // replSet is ready/ready-to-initiate; `resolve = null` makes it one-shot.
    var readyToTalkPromise = new Promise(function (resolve) {
      maybeReadyToTalk = function () {
        if (resolve &&
            listening &&
            (noOplog || replSetReadyToBeInitiated || replSetReady)) {
          proc.stdout.removeListener('data', stdoutOnData);
          resolve();
          resolve = null;
        }
      };
    });

    var stopOrReadyPromise = Promise.race([
      stopPromise,
      readyToTalkPromise,
    ]);

    var detectedErrors = {};
    // Scrape mongod's stdout for readiness and known error messages.
    var stdoutOnData = fiberHelpers.bindEnvironment(function (data) {
      // note: don't use "else ifs" in this, because 'data' can have multiple
      // lines
      if (/\[initandlisten\] Did not find local replica set configuration document at startup/.test(data) ||
          /\[.*\] Locally stored replica set configuration does not have a valid entry for the current node/.test(data)) {
        replSetReadyToBeInitiated = true;
        maybeReadyToTalk();
      }

      if (/ \[.*\] waiting for connections on port/.test(data)) {
        listening = true;
        maybeReadyToTalk();
      }

      if (/ \[rsSync-0\] transition to primary complete/.test(data)) {
        replSetReady = true;
        maybeReadyToTalk();
      }

      if (/Insufficient free space/.test(data)) {
        detectedErrors.freeSpace = true;
      }

      // Running against a old mmapv1 engine, probably from pre-mongo-3.2 Meteor
      if (/created by the 'mmapv1' storage engine, so setting the active storage engine to 'mmapv1'/.test(data)) {
        Console.warn();
        Console.warn('Your development database is using mmapv1, ' +
          'the old, pre-MongoDB 3.0 database engine. ' +
          'You should consider upgrading to Wired Tiger, the new engine. ' +
          'The easiest way to do so in development is to run ' +
          Console.command('meteor reset') + '. ' +
          "If you'd like to migrate your database, please consult " +
          Console.url('https://docs.mongodb.org/v3.0/release-notes/3.0-upgrade/'))
        Console.warn();
      }

      if (/Invalid or no user locale set/.test(data)) {
        detectedErrors.badLocale = true;
      }
    });
    proc.stdout.setEncoding('utf8');
    proc.stdout.on('data', stdoutOnData);

    // Buffer stderr so procExitHandler can report it via onExit.
    var stderrOutput = '';
    proc.stderr.setEncoding('utf8');
    proc.stderr.on('data', function (data) {
      stderrOutput += data;
    });

    // Block this fiber until ready to talk or stopped.
    stopOrReadyPromise.await();
  };

  // Connect to the primary-to-be and issue replSetInitiate/replSetReconfig,
  // then block until the primary reports writable (isMaster true).
  var initiateReplSetAndWaitForReady = function () {
    try {
      // Load mongo so we'll be able to talk to it.
      const { MongoClient, Server } =
        loadIsopackage('npm-mongo').NpmModuleMongodb;

      // Connect to the intended primary and start a replset.
      const client = new MongoClient(
        new Server('127.0.0.1', options.port, {
          poolSize: 1,
          socketOptions: {
            connectTimeoutMS: 60000
          }
        })
      );

      yieldingMethod(client, 'connect');
      const db = client.db('meteor');

      if (stopped) {
        return;
      }

      var configuration = {
        _id: replSetName,
        version: 1,
        protocolVersion: 1,
        members: [{_id: 0, host: '127.0.0.1:' + options.port, priority: 100}]
      };

      try {
        const config = yieldingMethod(db.admin(), "command", {
          replSetGetConfig: 1,
        }).config;
        // If a replication set configuration already exists, it's
        // important that the new version number is greater than the old.
        if (config && _.has(config, "version")) {
          configuration.version = config.version + 1;
        }
      } catch (e) {}

      if (options.multiple) {
        // Add two more members: one of which should start as secondary but
        // could in theory become primary, and one of which can never be
        // primary.
        configuration.members.push({
          _id: 1, host: '127.0.0.1:' + (options.port + 1), priority: 5
        });
        configuration.members.push({
          _id: 2, host: '127.0.0.1:' + (options.port + 2), priority: 0
        });
      }

      try {
        yieldingMethod(db.admin(), 'command', {
          replSetInitiate: configuration,
        });
      } catch (e) {
        if (e.message === 'already initialized') {
          // Reconfigure in place; `force` is required when not yet primary.
          yieldingMethod(db.admin(), 'command', {
            replSetReconfig: configuration,
            force: true,
          });
        } else {
          throw Error("rs.initiate error: " + e.message);
        }
      }

      if (stopped) {
        return;
      }

      let writableTimestamp = Date.now();

      // Wait until the primary is writable. If it isn't writable after one
      // minute, throw an error and report the replica set status.
      while (!stopped) {
        const { ismaster } = yieldingMethod(db.admin(), "command", {
          isMaster: 1
        });

        if (ismaster) {
          break;
        } else if (Date.now() - writableTimestamp > 60000) {
          const status = yieldingMethod(db.admin(), "command", {
            replSetGetStatus: 1
          });

          throw new Error(
            "Primary not writable after one minute. Last replica set status: " +
              JSON.stringify(status)
          );
        }

        utils.sleepMs(50);
      }

      client.close(true /* means "the app is closing the connection" */);
    } catch (e) {
      // If the process has exited, we're doing another form of error
      // handling. No need to throw random low-level errors farther.
      if (!stopped || (e instanceof StoppedDuringLaunch)) {
        throw e;
      }
    }
  };

  try {
    if (options.multiple) {
      // Test mode: three mongods on consecutive ports, then one replSet.
      var dbBasePath = files.pathJoin(options.projectLocalDir, 'dbs');
      _.each(_.range(3), function (i) {
        // Did we get stopped (eg, by one of the processes exiting) by now? Then
        // don't start anything new.
        if (stopped) {
          return;
        }
        var dbPath = files.pathJoin(options.projectLocalDir, 'dbs', ''+i);
        launchOneMongoAndWaitForReadyForInitiate(dbPath, options.port + i);
      });
      if (!stopped) {
        initiateReplSetAndWaitForReady();
      }
    } else {
      // Normal mode: one mongod; the port file records a successful setup.
      var dbPath = files.pathJoin(options.projectLocalDir, 'db');
      var portFile = !noOplog && files.pathJoin(dbPath, 'METEOR-PORT');
      launchOneMongoAndWaitForReadyForInitiate(dbPath, options.port, portFile);
      if (!stopped && !noOplog) {
        initiateReplSetAndWaitForReady();
        if (!stopped) {
          // Write down that we configured the database properly.
          files.writeFile(portFile, options.port);
        }
      }
    }
  } catch (e) {
    // StoppedDuringLaunch is the normal "aborted" signal — swallow it.
    if (!(e instanceof StoppedDuringLaunch)) {
      throw e;
    }
  }

  if (stopped) {
    return null;
  }

  return handle;
};
// Fetch entities related to `rel`, merge each related value onto `item`
// under its key, then report back through the outer `callback`.
this.getRelated(rel, function (err, relatedData) {
  _.each(relatedData, function (value, key) {
    item[key] = value;
  });
  // Forward the original error and the (now merged) data unchanged.
  callback(err, relatedData);
}, true); // trailing `true` flag — semantics defined by getRelated (not visible here)
addAll: function() { _.each(this.collection.models, this.addOne, this); this.hideLoading(); },
* Copyright © 2013-2017 Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ 'use strict'; var combo = require('./combo'), themes = require('./themes'), _ = require('underscore'); var themeOptions = {}; _.each(themes, function(theme, name) { themeOptions[name] = { 'files': [ '<%= combo.autopath(\''+name+'\', path.pub) %>/**/*.less' ], 'tasks': 'less:' + name }; }); var watchOptions = { 'setup': { 'files': '<%= path.less.setup %>/**/*.less', 'tasks': 'less:setup' }, 'updater': { 'options': { livereload: true }, 'files': '<%= path.less.updater %>/**/*.less', 'tasks': 'less:updater'
/**
 * Build the serializable "jsnbt" application descriptor from the live
 * `app` object: identity fields, modules, keyed lookups of config arrays,
 * messaging implementations, languages, countries and collections.
 * NOTE(review): reads module-scope `app`, `languages`, `countries`,
 * `extend` and `_` — declared outside this chunk.
 * @returns {Object} the aggregated descriptor
 */
var getJsnbtObject = function () {

  // Top-level identity/metadata of the app.
  var result = {
    name: app.config.name,
    version: app.version,
    localization: app.localization,
    ssl: app.ssl
  };

  // Re-key the config array app.config[selfName] into result[resultName]
  // as an object indexed by each item's `identifier` property.
  var applyArrayInObject = function (selfName, resultName, identifier) {
    result[resultName] = {};
    _.each(app.config[selfName], function (selfItem) {
      result[resultName][selfItem[identifier]] = selfItem;
    });
  };

  result.fileGroups = app.config.fileGroups;

  // Describe every non-core module; getName/getVersion are optional hooks.
  result.modules = {};
  _.each(app.modules.all, function (module) {
    if (module.domain !== 'core') {
      result.modules[module.domain] = {
        name: typeof (module.getName) === 'function' ? module.getName() : undefined,
        domain: module.domain,
        version: typeof (module.getVersion) === 'function' ? module.getVersion() : undefined,
        pointed: module.pointed,
        section: module.section,
        // browsable defaults to true when unset; only explicit true/undefined count.
        browsable: module.browsable === undefined || module.browsable === true
      }
      // Deep-merge the module's own config on top of the summary.
      if (module.config) {
        extend(true, result.modules[module.domain], module.config);
      }
    }
  });

  result.lists = app.config.lists;
  result.injects = app.config.injects;

  // Key each config array by its natural identifier.
  applyArrayInObject('entities', 'entities', 'name');
  applyArrayInObject('roles', 'roles', 'name');
  applyArrayInObject('sections', 'sections', 'name');
  applyArrayInObject('templates', 'templates', 'id');
  applyArrayInObject('layouts', 'layouts', 'id');
  applyArrayInObject('containers', 'containers', 'id');
  applyArrayInObject('routes', 'routes', 'id');
  applyArrayInObject('languages', 'languages', 'code');
  applyArrayInObject('countries', 'countries', 'code');

  result.content = app.config.content;

  // Expose only a summary (domain/provider/settings template) of each
  // configured mail and sms implementation.
  result.messaging = {
    mail: {},
    sms: {}
  };
  for (var mailImplementationName in app.config.messaging.mail.implementations) {
    var mailImplementation = app.config.messaging.mail.implementations[mailImplementationName];
    result.messaging.mail[mailImplementationName] = {
      domain: mailImplementationName,
      name: mailImplementation.provider,
      settingsTmpl: mailImplementation.settingsTmpl
    }
  }
  for (var smsImplementationName in app.config.messaging.sms.implementations) {
    var smsImplementation = app.config.messaging.sms.implementations[smsImplementationName];
    result.messaging.sms[smsImplementationName] = {
      domain: smsImplementationName,
      name: smsImplementation.provider,
      settingsTmpl: smsImplementation.settingsTmpl
    }
  }

  // NOTE: these overwrite the `languages`/`countries` keys produced by
  // applyArrayInObject above, using the module-scope lists instead.
  result.languages = {};
  _.each(languages, function (language) {
    result.languages[language.code] = language;
  });

  result.countries = {};
  _.each(countries, function (country) {
    result.countries[country.code] = country;
  });

  // Expose only the default and permissions of each collection config.
  result.collections = {};
  Object.keys(app.config.collections).forEach(function (collection) {
    result.collections[collection] = {};
    if (app.config.collections[collection].default) {
      result.collections[collection].default = app.config.collections[collection].default;
    }
    if (app.config.collections[collection].permissions) {
      result.collections[collection].permissions = app.config.collections[collection].permissions;
    }
  });

  return result;
}
/**
 * Express route handler for Keystone list API actions, dispatched on
 * req.params.action: 'autocomplete' (relationship field search),
 * 'order' (persist drag-and-drop sortOrder) and 'create' (new item).
 * Unknown actions fall through the switch and send no response.
 * NOTE(review): relies on module-scope `keystone`, `_` and `async`
 * declared outside this chunk.
 */
exports = module.exports = function(req, res) {

  // Send a JSON response body.
  var sendResponse = function(status) {
    res.json(status);
  };

  // Log and send a 500 with { error, detail }.
  var sendError = function(key, err, msg) {
    msg = msg || 'API Error';
    key = key || 'unknown error';
    msg += ' (' + key + ')';
    console.log(msg + (err ? ':' : ''));
    if (err) {
      console.log(err);
    }
    res.status(500);
    // `key || 'error'` is redundant (key was defaulted above) but harmless.
    sendResponse({ error: key || 'error', detail: err ? err.message : '' });
  };

  switch (req.params.action) {

    case 'autocomplete':
      // Paginated full-text search over the list's search fields.
      var limit = req.query.limit || 50;
      var page = req.query.page || 1;
      var skip = limit * (page - 1);

      var filters = req.list.getSearchFilters(req.query.q);

      // Two parallel mongoose queries: total count and the current page.
      var count = req.list.model.count(filters);
      var query = req.list.model.find(filters)
        .limit(limit)
        .skip(skip)
        .sort(req.list.defaultSort);

      // When autocompleting a relationship field, constrain both queries by
      // the caller-supplied dependent filters.
      if (req.query.context === 'relationship') {
        var srcList = keystone.list(req.query.list);
        if (!srcList) return sendError('invalid list provided');

        var field = srcList.fields[req.query.field];
        if (!field) return sendError('invalid field provided');

        _.each(req.query.filters, function(value, key) {
          // Empty filter values match documents where the key is null.
          query.where(key).equals(value ? value : null);
          count.where(key).equals(value ? value : null);
        });
      }

      count.exec(function(err, total) {
        if (err) return sendError('database error', err);
        query.exec(function(err, items) {
          if (err) return sendError('database error', err);
          sendResponse({
            total: total,
            items: items.map(function(i) {
              // Fall back to the id when the document has no display name.
              return {
                name: req.list.getDocumentName(i, false) || '(' + i.id + ')',
                id: i.id
              };
            })
          });
        });
      });
      break;

    case 'order':
      if (!keystone.security.csrf.validate(req)) {
        return sendError('invalid csrf');
      }

      var order = req.query.order || req.body.order;
      var queue = [];

      // Accept either an array or a comma-separated string of ids.
      if ('string' === typeof order) {
        order = order.split(',');
      }

      // Persist each item's new position as its array index.
      _.each(order, function(id, i) {
        queue.push(function(done) {
          req.list.model.update({ _id: id }, { $set: { sortOrder: i } }, done);
        });
      });

      async.parallel(queue, function(err) {
        if (err) return sendError('database error', err);
        return sendResponse({
          success: true
        });
      });
      break;

    case 'create':
      if (!keystone.security.csrf.validate(req)) {
        return sendError('invalid csrf');
      }

      var item = new req.list.model();
      var updateHandler = item.getUpdateHandler(req);
      // POST bodies win; GET creation reads from the query string.
      var data = (req.method === 'POST') ? req.body : req.query;

      // Lists whose name is entered at creation time validate it up front.
      if (req.list.nameIsInitial) {
        if (req.list.nameField.validateInput(data)) {
          req.list.nameField.updateItem(item, data);
        } else {
          updateHandler.addValidationError(req.list.nameField.path, 'Name is required.');
        }
      }

      updateHandler.process(data, {
        flashErrors: true,
        logErrors: true,
        fields: req.list.initialFields
      }, function(err) {
        if (err) {
          // Validation/processing errors are reported with a 200 +
          // success:false (not via sendError).
          return sendResponse({
            success: false,
            err: err
          });
        } else {
          return sendResponse({
            success: true,
            name: req.list.getDocumentName(item, false),
            id: item.id
          });
        }
      });
      break;

  }

};
exports.exec = function() { var loggerFn, cb, opts if(arguments.length === 1) { cb = arguments[0]; } else if(arguments.length === 2) { cb = arguments[0]; opts = arguments[1]; } else if(arguments.length === 3) { loggerFn = arguments[0]; cb = arguments[1]; opts = arguments[2]; } loggerFn = loggerFn || addFileLoggers; // Process command line args. var cwd = process.cwd(); program.option('-C, --cluster', 'run in cluster'). option('-c, --config <configFile>', 'path to config', cwd + '/../config/dev.json'). option('-p, --port <port>', 'port to bind to', 3000). option('-m, --monPort <monPort>', 'port for monitoring', 3001). option('-t, --tables <tables>', 'path of dir containing tables', cwd + '/tables'). option('-r, --routes <routes>', 'path of dir containing routes', cwd + '/routes'). option('-x, --xformers <xformers>', 'path of dir containing xformers', cwd + '/config/xformers.json'). option('-a, --ecvPath <ecvPath>', 'ecv path', '/ecv'). option('-n, --noWorkers <noWorkers>', 'no of workers', os.cpus.length). option('-e, --disableConsole', 'disable the console', false). 
option('-q, --disableQ', 'disable /q', false); if(opts) { _.each(opts, function(opt) { program.option(opt[0], opt[1], opt[2], opt[3]); }) } program.parse(process.argv); var options = { cluster: program.cluster, port: parseInt(program.port), monPort: parseInt(program.monPort), config: program.config, tables: program.tables, routes: program.routes, xformers: program.xformers, disableConsole: program.disableConsole, disableQ: program.disableQ, noWorkers: program.noWorkers, ecvPath: program.ecvPath, 'request-id': program.requestId || 'Request-ID', loggerFn: loggerFn, ecv: { monitor: '/tables', validator: function(status, headers, data) { return JSON.parse(data); } } }; options.__proto__ = program; if(process.argv.indexOf('stop') >= 0) { cluster2.stop(options); } else if(process.argv.indexOf('shutdown') >= 0) { cluster2.shutdown(options); } else { var emitter; cluster2.listen(options, function(cb2) { createConsole(options, function(app, e) { emitter = e; cb2(app); }) }, function(app) { if(cb) { cb(app, program, emitter); } }); } }
/**
 * Recursively convert a MongoDB-like filter object into a SQL WHERE-clause
 * string. Supports $and / $or arrays, explicit { $key, $value } items
 * (with optional $before_key/$after_key decoration), and computed fields
 * via $where; plain keys are joined with AND.
 * Called with `this` bound to the query-parsing context (uses
 * this.hashAlias); delegates leaf items to _parseFilterItem and computed
 * field setup to _parseFields.
 * @returns {String} trimmed WHERE-clause fragment
 */
var _parseFilter = function(model, filter, parentAlias, parentField) {

  // Set data
  var self = this;
  var whereClause = '';
  // NOTE(review): declared but never used in this function.
  var filterAlreadyFullfill = false;

  // If AND: recurse into each item and join with AND
  if(filter['$and'] instanceof Array) {
    _.each(filter['$and'], function(andItem, andIndex) {

      // Stop if AND item is empty
      if(_.keys(andItem).length === 0) {
        return;
      }

      // Add AND item (parenthesized sub-clause)
      whereClause += '( ' + _parseFilter.call(self, model, andItem, parentAlias, parentField) + ' ) ';

      // Add AND keyword (between items only, not after the last)
      if(andIndex < filter['$and'].length-1) {
        whereClause += 'AND ';
      }

    });
  }

  // If OR: recurse into each item and join with OR
  else if(filter['$or'] instanceof Array) {
    _.each(filter['$or'], function(orItem, orIndex) {

      // Stop if OR item is empty
      if(_.keys(orItem).length === 0) {
        return;
      }

      // Add OR item (parenthesized sub-clause)
      whereClause += '( ' + _parseFilter.call(self, model, orItem, parentAlias, parentField) + ' ) ';

      // Add OR keyword (between items only, not after the last)
      if(orIndex < filter['$or'].length-1) {
        whereClause += 'OR ';
      }

    });
  }

  // If $key is defined: single explicit key/value leaf
  else if(typeof filter['$key'] !== 'undefined') {

    // Set data ($before_key/$after_key decorate the column reference)
    var filterKey = {
      '$before_key': filter['$before_key'] || '',
      '$after_key': filter['$after_key'] || '',
      '$key': filter['$key']
    };
    var filterValue = filter['$value'];

    // Parse filter
    whereClause += _parseFilterItem.call(self, model, filterKey, filterValue, parentAlias, parentField);

  } else {

    // Parse item for each filter keys (implicitly AND-joined)
    var filterKeys = _.keys(filter);
    _.each(filterKeys, function(filterKey, filterKeyIndex) {

      // Set data
      var filterValue = filter[filterKey];

      // If $where is defined: computed/aggregated field condition
      if(typeof objectPath.get(filterValue, '$where') !== 'undefined') {

        // Set data
        var fieldsToParse = {};
        var customField = _.clone(filterValue);

        // Delete where from custom field (the rest describes the field itself)
        delete customField['$where'];

        // Get parent hash alias item
        var parentHashAliasItem = _.find(self.hashAlias, function(obj) { return obj.alias === parentAlias; });

        // Get some field path (skip empty segments with _.reject)
        var absoluteFieldPath = _.reject([ parentHashAliasItem.key, filterKey ], function(obj) { return !obj; }).join('.');
        // Interleave '$fields' between path segments for objectPath.set below.
        var fieldsToParsePath = absoluteFieldPath.split('.').join('.$fields.').split('.');

        // Set field to parse
        // NOTE(review): redundant redeclaration — fieldsToParse was already
        // initialized to {} above; harmless (var redeclaration is a no-op
        // for hoisting, and the value is reset to a fresh object).
        var fieldsToParse = {};

        // Set fields to parse
        objectPath.set(fieldsToParse, fieldsToParsePath, customField);

        // Parse fields (registers joins/columns for the computed field)
        _parseFields.call(self, model, fieldsToParse, parentAlias, parentField);

        // Add where (last arg `true` — flag meaning defined by _parseFilterItem)
        whereClause += _parseFilterItem.call(self, model, filterValue['$as'], filterValue['$where'], parentAlias, parentField, true);

      } else {

        // Parse filter item (plain leaf)
        whereClause += _parseFilterItem.call(self, model, filterKey, filterValue, parentAlias, parentField);

      }

      // Add AND keyword between sibling keys (original comment said "OR",
      // but the code appends 'AND ' — plain keys are AND-joined)
      if(filterKeyIndex < filterKeys.length-1) {
        whereClause += 'AND ';
      }

    });

  }

  // Trim where clause
  whereClause = whereClause.trim();

  // Return where clause
  return whereClause;

};
var Model = require('../../models.js').model; var APIURL = "http://opendata.brussels.be/api/records/1.0/search?"; var DATASETS = [ { ds: "cultural-places", nameField: "cultural_place" }, // TODO(mla): check why the API does not accept multiple // dataset records search. //{ ds: "bxl_urinals", nameField: "urinoir" } ]; var execcmd = true; if(execcmd && (argv.id || argv.i)) { var dataSets = ""; var dataSetsMap = {}; _.each(DATASETS,function(d){dataSets+="&dataset="+(d.ds); dataSetsMap[d.ds]=d; }); http.get(APIURL+"rows=10000"+dataSets, function(res) { var result = '', length=0; res.on('data', function(chunk) { length += chunk.length; process.stdout.write("Downloading " + length + " bytes\r"); result += chunk; }); res.on('end', function() { var datares = JSON.parse(result); console.log("\nend.",datares); _.each(datares.records,function(r){ var poi = {};
_.each(fields, function(fieldValue, fieldKey) { // If field is $all if(fieldKey === '$all') { _.each(structureColumns, function(columnItem) { self.queryBuilderColumns[parentAlias + '.' + columnItem] = parentAlias + '$' + columnItem; self.requestedFields.push(parentAlias + '$' + columnItem); }); } // If field is structure column else if(_.contains(structureColumns, fieldKey)) { self.queryBuilderColumns[parentAlias + '.' + fieldKey] = parentAlias + '$' + fieldKey; self.requestedFields.push(parentAlias + '$' + fieldKey); } // If field is one2one relationship else if(_.contains(oneToOneRelationshipKeys, fieldKey)) { // Set data var fieldRelationship = model.getRelationship(fieldKey); var fieldModelStructureColumns = fieldRelationship.model.getStructureColumns(); // Set alias var joinAliasKey = parentField ? (parentField + '.' + fieldKey) : fieldKey; var joinAliasItem = _.find(self.hashAlias, function(obj) { return obj.key === joinAliasKey; }); // If join already exists if(typeof joinAliasItem !== 'undefined') { // Set join alias var joinAlias = joinAliasItem.alias; } // If join does not exists else { // Set join alias var joinAlias = parentAlias + '$' + 'T' + self.aliasIncrement; // Add alias to hash alias self.aliasIncrement++; self.hashAlias.push({ alias: joinAlias, key: joinAliasKey, model: fieldRelationship.model }); // Add left join if(fieldRelationship.type === 'belongsTo') { self.queryBuildingTasks[0].push(function() { self.queryBuilder.addJoin('left', [ fieldRelationship.model.getTableName() + ' AS ' + joinAlias, joinAlias + '.' + (fieldRelationship.key || fieldRelationship.model.getPrimaryKey()), parentAlias + '.' + fieldRelationship.foreignKey ]); }); } else if(fieldRelationship.type === 'hasOne') { self.queryBuildingTasks[0].push(function() { self.queryBuilder.addJoin('left', [ fieldRelationship.model.getTableName() + ' AS ' + joinAlias, parentAlias + '.' + (fieldRelationship.key || model.getPrimaryKey()), joinAlias + '.' 
+ fieldRelationship.foreignKey ]); }); } } // Check if value request more if(objectPath.get(fieldValue, '$fields')) { _parseFields.call(self, fieldRelationship.model, fieldValue['$fields'], joinAlias, joinAliasKey); } else { _parseFields.call(self, fieldRelationship.model, { '$all': 1 }, joinAlias, joinAliasKey); } } // If field is one2one custom relationship else if(_.contains(oneToOneCustomRelationshipKeys, fieldKey)) { // Set data var fieldRelationship = model.getRelationship(fieldKey); var fieldRelationshipModel = fieldRelationship.model['$model']; var fieldRelationshipAlias = fieldRelationship.model['$alias']; var fieldModelStructureColumns = fieldRelationshipModel.getStructureColumns(); var joinAliasKey = parentField ? (parentField + '.' + fieldKey) : fieldKey; var joinAlias = parentAlias + '$' + 'T' + self.aliasIncrement; var aliasRefs = {}; // Add alias to hash alias aliasRefs[fieldRelationshipAlias] = joinAlias; self.aliasIncrement++; self.hashAlias.push({ alias: joinAlias, key: joinAliasKey, model: fieldRelationshipModel }); // Set data var joinRaw = ''; // Loop over joinRaw _.each(fieldRelationship.joinRaw, function(joinItem) { // Add to ref if(!aliasRefs[joinItem['$alias']]) { aliasRefs[joinItem['$alias']] = joinAlias + '__' + joinItem['$alias']; } // If item is string if(typeof joinItem === 'string') { joinRaw += joinItem; } // If item is $string else if(typeof joinItem === 'object' && _.difference([ '$string' ], _.keys(joinItem)).length === 0) { joinRaw += String(joinItem['$string']); } // If item is $model/$alias object else if(typeof joinItem === 'object' && _.difference([ '$model', '$alias' ], _.keys(joinItem)).length === 0) { joinRaw += '`' + joinItem['$model'].getTableName() + '` AS `' + aliasRefs[joinItem['$alias']] + '`'; } // If item is $key/$alias object else if(typeof joinItem === 'object' && _.difference([ '$key', '$alias' ], _.keys(joinItem)).length === 0) { joinRaw += '`' + aliasRefs[joinItem['$alias']] + '`.`' + joinItem['$key'] + '`'; } 
// If item is $alias object else if(typeof joinItem === 'object' && _.difference([ '$alias' ], _.keys(joinItem)).length === 0) { joinRaw += '`' + aliasRefs[joinItem['$alias']] + '`'; } // If item is $key object else if(typeof joinItem === 'object' && _.difference([ '$key' ], _.keys(joinItem)).length === 0) { // Set column raw joinRaw += '`' + parentAlias + '`.`' + joinItem['$key'] + '`'; // Add query builder columns self.queryBuilderColumns[parentAlias + '.' + joinItem['$key']] = parentAlias + '$' + joinItem['$key']; } }); // Add to subquery building tasks self.queryBuildingTasks[0].push(function() { self.queryBuilder.addJoin('raw', joinRaw); self.queryBuilder.addGroupByRaw('`' + parentAlias + '`.`' + model.getPrimaryKey() + '`'); }); // Check if value request more if(objectPath.get(fieldValue, '$fields')) { _parseFields.call(self, fieldRelationshipModel, fieldValue['$fields'], joinAlias, joinAliasKey); } else { _parseFields.call(self, fieldRelationshipModel, { '$all': 1 }, joinAlias, joinAliasKey); } } // If field is one2many relationship and is grouped else if(_.contains(oneToManyRelationshipKeys, fieldKey) && objectPath.get(fieldValue, '$as')) { // Set data var fieldRelationship = model.getRelationship(fieldKey); // Set alias var calculatedColumn = parentAlias + '$' + fieldValue['$as']; var joinAlias = parentAlias + '$' + 'T' + self.aliasIncrement; var joinAliasKey = parentField ? (parentField + '.' + fieldKey) : fieldKey; var relationKey = fieldRelationship.key || model.getPrimaryKey(); // Add query builder columns self.queryBuilderColumns[parentAlias + '.' 
+ relationKey] = parentAlias + '$' + relationKey; // Add alias to hash alias self.aliasIncrement++; self.hashAlias.push({ alias: joinAlias, key: joinAliasKey, model: fieldRelationship.model }); // Push to query building tasks self.queryBuildingTasks[1].push(function() { // If already exists if(_.contains(self.requestedFields, calculatedColumn)) { return; } // Set data var subqueryBuilder = new QueryBuilder(); // Set from subqueryBuilder.setFromRaw(self.queryBuilder); subqueryBuilder.setSource({ type: 'groupedOneToMany', field: fieldValue }); // Add column subqueryBuilder.addColumn(knex.raw('`' + self.queryBuilder.getAlias() + '`.*')); // Create subjoin var subjoinFilter = fieldValue['$filter'] || {}; var subjoinFields = {}; var subjoinTableName = fieldRelationship.model.getTableName(); var subjoinAlias = 'T1'; // Add foreign key to subjoin fields subjoinFields[fieldRelationship.foreignKey] = 1; // Set subjoin fields if(objectPath.get(fieldValue, '$count')) { var subJoinOnField = fieldRelationship.foreignKey; } else if(objectPath.get(fieldValue, '$sum')) { var subJoinOnField = fieldValue['$sum']; } else if(objectPath.get(fieldValue, '$avg')) { var subJoinOnField = fieldValue['$avg']; } // Add on key to subjoin fields subjoinFields[subJoinOnField] = 1; // Add calculated column if(objectPath.get(fieldValue, '$count')) { subqueryBuilder.addColumn(knex.raw('COUNT(`' + subjoinAlias + '`.`' + subJoinOnField + '`) AS `' + calculatedColumn + '`')); } else if(objectPath.get(fieldValue, '$sum')) { subqueryBuilder.addColumn(knex.raw('SUM(`' + subjoinAlias + '`.`' + subJoinOnField + '`) AS `' + calculatedColumn + '`')); } else if(objectPath.get(fieldValue, '$avg')) { subqueryBuilder.addColumn(knex.raw('AVG(`' + subjoinAlias + '`.`' + subJoinOnField + '`) AS `' + calculatedColumn + '`')); } // Add join subqueryBuilder.addJoin('left', [ subjoinTableName + ' AS ' + subjoinAlias, self.queryBuilder.getAlias() + '.' + parentAlias + '$' + relationKey, subjoinAlias + '.' 
+ fieldRelationship.foreignKey ]); // Add group by subqueryBuilder.addGroupByRaw('`' + self.queryBuilder.getAlias() + '`.`' + parentAlias + '$' + relationKey + '`'); // Add required field self.requestedFields.push(calculatedColumn); // Set query builder self.queryBuilder = subqueryBuilder; }); } // If field is custom else if(_.contains(customKeys, fieldKey)) { // Set data var fieldCustom = model.getCustom(fieldKey); var customAlias = parentAlias + '$' + fieldKey; var subqueryBuilder = new QueryBuilder(); subqueryBuilder.setSource({ type: 'customKey', field: fieldKey }); var subqueryBuildingTasks = []; // Set from subqueryBuildingTasks.push(function() { subqueryBuilder.setFromRaw(self.queryBuilder); subqueryBuilder.addColumn(knex.raw('`' + self.queryBuilder.getAlias() + '`.*')); }); // Add required field subqueryBuildingTasks.push(function() { self.requestedFields.push(customAlias); }); // If field is provided if(fieldCustom.data['$field'] instanceof Array) { // Set data var columnRaw = ''; // Loop over field _.each(fieldCustom.data['$field'], function(fieldItem) { // If item is string if(typeof fieldItem === 'string') { columnRaw += fieldItem; } // If item is string else if(typeof fieldItem === 'object' && _.difference([ '$string' ], _.keys(fieldItem)).length === 0) { columnRaw += String(fieldItem['$string']); } // If item is object else if(typeof fieldItem === 'object' && _.difference([ '$key', '$alias' ], _.keys(fieldItem)).length === 0) { columnRaw += '`' + fieldItem['$alias'] + '`.`' + fieldItem['$key'] + '`'; } // If item is string else if(typeof fieldItem === 'object' && _.difference([ '$key' ], _.keys(fieldItem)).length === 0) { // Set column raw columnRaw += '`' + '$QB$' + '`.`' + parentAlias + '$' + fieldItem['$key'] + '`'; // Add query builder columns self.queryBuilderColumns[parentAlias + '.' 
+ fieldItem['$key']] = parentAlias + '$' + fieldItem['$key']; } }); // Add column subqueryBuildingTasks.push(function() { // Add query builder alias to column raw columnRaw = columnRaw.replace(/`\$QB\$`/g, self.queryBuilder.getAlias()); // Add column subqueryBuilder.addColumn(knex.raw(columnRaw + ' AS `' + customAlias + '`')); }); } // If join is provided if(fieldCustom.data['$join'] instanceof Array) { // Loop over join _.each(fieldCustom.data['$join'], function(joinItem) { // Set data var joinItemType = joinItem['$type'] || 'inner'; var joinItemModel = joinItem['$model']; var joinItemAlias = joinItem['$alias']; var joinItemKey = joinItem['$key']; var joinItemOn = joinItem['$on']; var joinItemTable = joinItemModel.getTableName(); // Add query builder columns self.queryBuilderColumns[parentAlias + '.' + joinItemOn] = parentAlias + '$' + joinItemOn; // Add join subqueryBuildingTasks.push(function() { subqueryBuilder.addJoin(joinItemType, [ joinItemTable + ' AS ' + joinItemAlias, self.queryBuilder.getAlias() + '.' + parentAlias + '$' + joinItemOn, joinItemAlias + '.' 
+ joinItemKey ]); }); }); } // If joinRaw is provided else if(fieldCustom.data['$joinRaw'] instanceof Array) { // Set data var joinRaw = ''; // Loop over joinRaw _.each(fieldCustom.data['$joinRaw'], function(joinItem) { // If item is string if(typeof joinItem === 'string') { joinRaw += joinItem; } // If item is $string else if(typeof joinItem === 'object' && _.difference([ '$string' ], _.keys(joinItem)).length === 0) { joinRaw += String(joinItem['$string']); } // If item is $model/$alias object else if(typeof joinItem === 'object' && _.difference([ '$model', '$alias' ], _.keys(joinItem)).length === 0) { joinRaw += '`' + joinItem['$model'].getTableName() + '` AS `' + joinItem['$alias'] + '`'; } // If item is $key/$alias object else if(typeof joinItem === 'object' && _.difference([ '$key', '$alias' ], _.keys(joinItem)).length === 0) { joinRaw += '`' + joinItem['$alias'] + '`.`' + joinItem['$key'] + '`'; } // If item is $key object else if(typeof joinItem === 'object' && _.difference([ '$key' ], _.keys(joinItem)).length === 0) { // Set column raw joinRaw += '`' + '$QB$' + '`.`' + parentAlias + '$' + joinItem['$key'] + '`'; // Add query builder columns self.queryBuilderColumns[parentAlias + '.' + joinItem['$key']] = parentAlias + '$' + joinItem['$key']; } }); // Add to subquery building tasks subqueryBuildingTasks.push(function() { // Add query builder alias to join raw joinRaw = joinRaw.replace(/`\$QB\$`/g, self.queryBuilder.getAlias()); // Add join subqueryBuilder.addJoin('raw', joinRaw); }); } // Add query builder columns self.queryBuilderColumns[self.mainAlias + '.' + self.mainModel.getPrimaryKey()] = self.mainAlias + '$' + self.mainModel.getPrimaryKey(); self.queryBuilderColumns[parentAlias + '.' 
+ model.getPrimaryKey()] = parentAlias + '$' + model.getPrimaryKey(); // Add group by subqueryBuildingTasks.push(function() { subqueryBuilder.addGroupByRaw('`' + self.queryBuilder.getAlias() + '`.`' + self.mainAlias + '$' + self.mainModel.getPrimaryKey() + '`'); subqueryBuilder.addGroupByRaw('`' + self.queryBuilder.getAlias() + '`.`' + parentAlias + '$' + model.getPrimaryKey() + '`'); }); // Push to query building tasks self.queryBuildingTasks[1].push(function() { // If already exists if(_.contains(self.requestedFields, customAlias)) { return; } // Run subquery building tasks _.each(subqueryBuildingTasks, function(fun) { return fun(); }); // Set query builder self.queryBuilder = subqueryBuilder; }); } // If field is one2many relationship else if(_.contains(oneToManyRelationshipKeys, fieldKey)) { // Set data var fieldRelationship = model.getRelationship(fieldKey); var relationKey = fieldRelationship.key || model.getPrimaryKey(); // Add required field self.queryBuilderColumns[parentAlias + '.' + relationKey] = parentAlias + '$' + relationKey; self.requestedFields.push(parentAlias + '$' + relationKey); } // If field is one2many custom relationship else if(_.contains(oneToManyCustomRelationshipKeys, fieldKey)) { // Set data var fieldRelationship = model.getRelationship(fieldKey); var relationKey = fieldRelationship.key || model.getPrimaryKey(); // Add required field self.queryBuilderColumns[parentAlias + '.' + relationKey] = parentAlias + '$' + relationKey; self.requestedFields.push(parentAlias + '$' + relationKey); } // If field is undefined else { invalidQueryData[fieldKey] = 'unknown'; } });
/**
 * Create the navigation sidebar.
 * Namespaces and classes are merged into a single list, sorted by
 * `longname`; every entry that is not an interface and is not part of the
 * 'olx' namespace becomes one sidebar item.
 * @param {object} members The members that will be used to create the sidebar.
 * @param {array<object>} members.classes
 * @param {array<object>} members.externals
 * @param {array<object>} members.globals
 * @param {array<object>} members.mixins
 * @param {array<object>} members.modules
 * @param {array<object>} members.namespaces
 * @param {array<object>} members.tutorials
 * @param {array<object>} members.events
 * @return {array<object>} The entries for the navigation sidebar.
 */
function buildNav(members) {

  // Build one sidebar entry for a namespace or class doclet. Key order
  // mirrors the previous hand-written literals: class entries carry
  // `fires` immediately before `events`.
  function navEntry(v) {
    var entry = {
      type: v.kind,
      longname: v.longname,
      name: v.name,
      members: find({ kind: 'member', memberof: v.longname }),
      methods: find({ kind: 'function', memberof: v.longname }),
      typedefs: find({ kind: 'typedef', memberof: v.longname })
    };
    if (v.kind === 'class') {
      entry.fires = v.fires;
    }
    entry.events = find({ kind: 'event', memberof: v.longname });
    return entry;
  }

  // merge namespaces and classes, then sort by longname
  var merged = members.namespaces.concat(members.classes);
  merged.sort(function (a, b) {
    if (a.longname > b.longname) return 1;
    if (a.longname < b.longname) return -1;
    return 0;
  });

  var nav = [];
  merged.forEach(function (v) {
    // exclude 'olx' and interfaces from sidebar
    var excluded = v.longname.indexOf('olx') === 0 || v.interface === true;
    if (!excluded && (v.kind === 'namespace' || v.kind === 'class')) {
      nav.push(navEntry(v));
    }
  });
  return nav;
}
// Load every server-side file listed in serverJson.load inside a Fiber,
// giving each file its own `Npm`/`Assets` facade scoped to that file's
// node_modules and assets; afterwards install the global `Npm`/`Assets`
// objects and start wallaby.
// NOTE(review): relies on `serverJson`, `serverDir`, `files`, `Future`,
// `Package`, `shouldLoadPackage` and `wallaby` being defined earlier in
// this file — confirm before relocating this block.
Fiber(function () {
  _.each(serverJson.load, function (fileInfo) {
    // Skip files that shouldLoadPackage() filters out.
    if (!shouldLoadPackage(fileInfo)) {
      return;
    }

    var code = fs.readFileSync(path.resolve(serverDir, fileInfo.path));

    // Per-file Npm facade: resolves requires against this file's own
    // node_modules directory when it has one.
    var Npm = {
      /**
       * @summary Require a package that was specified using
       * `Npm.depends()`.
       * @param {String} name The name of the package to require.
       * @locus Server
       * @memberOf Npm
       */
      require: function (name) {
        // No private node_modules for this file: fall through to the
        // ambient require.
        if (!fileInfo.node_modules) {
          return require(name);
        }

        var nodeModuleBase = path.resolve(
          serverDir, files.convertToOSPath(fileInfo.node_modules));
        var nodeModuleDir = path.resolve(nodeModuleBase, name);

        // If the user does `Npm.require('foo/bar')`, then we should resolve to
        // the package's node modules if `foo` was one of the modules we
        // installed. (`foo/bar` might be implemented as `foo/bar.js` so we
        // can't just naively see if all of nodeModuleDir exists.)
        if (fs.existsSync(path.resolve(nodeModuleBase, name.split("/")[0]))) {
          return require(nodeModuleDir);
        }

        try {
          return require(name);
        } catch (e) {
          // Try to guess the package name so we can print a nice
          // error message
          // fileInfo.path is a standard path, use files.pathSep
          var filePathParts = fileInfo.path.split(files.pathSep);
          var packageName = filePathParts[1].replace(/\.js$/, '');
          // XXX better message
          throw new Error(
            "Can't find npm module '" + name +
            "'. Did you forget to call 'Npm.depends' in package.js " +
            "within the '" + packageName + "' package?");
        }
      }
    };

    // Per-file asset reader: looks the asset up in fileInfo.assets and
    // reads it relative to serverDir. Synchronous when no callback is
    // passed (uses a Future); async otherwise.
    var getAsset = function (assetPath, encoding, callback) {
      var fut;
      if (!callback) {
        fut = new Future();
        callback = fut.resolver();
      }
      // This assumes that we've already loaded the meteor package, so meteor
      // itself can't call Assets.get*. (We could change this function so that
      // it doesn't call bindEnvironment if you don't pass a callback if we
      // need to.)
      var _callback = Package.meteor.Meteor.bindEnvironment(function (err, result) {
        if (result && !encoding)
          // Sadly, this copies in Node 0.10.
          result = new Uint8Array(result);
        callback(err, result);
      }, function (e) {
        console.log("Exception in callback of getAsset", e.stack);
      });

      // Convert a DOS-style path to Unix-style in case the application code
      // was written on Windows.
      assetPath = files.convertToStandardPath(assetPath);

      if (!fileInfo.assets || !_.has(fileInfo.assets, assetPath)) {
        _callback(new Error("Unknown asset: " + assetPath));
      } else {
        var filePath = path.join(serverDir, fileInfo.assets[assetPath]);
        fs.readFile(files.convertToOSPath(filePath), encoding, _callback);
      }
      if (fut)
        return fut.wait();
    };

    var Assets = {
      getText: function (assetPath, callback) {
        return getAsset(assetPath, "utf8", callback);
      },
      getBinary: function (assetPath, callback) {
        return getAsset(assetPath, undefined, callback);
      }
    };

    // \n is necessary in case final line is a //-comment
    var wrapped = "(function(Npm, Assets){" + code + "\n})";

    // It is safer to use the absolute path when source map is present as
    // different tooling, such as node-inspector, can get confused on relative
    // urls.
    // fileInfo.path is a standard path, convert it to OS path to join with
    // __dirname
    var fileInfoOSPath = files.convertToOSPath(fileInfo.path);

    // The final 'true' is an undocumented argument to runIn[Foo]Context that
    // causes it to print out a descriptive error message on parse error. It's
    // what require() uses to generate its errors.
    var func = require('vm').runInThisContext(wrapped, fileInfoOSPath, true);
    func.call(global, Npm, Assets); // Coffeescript
  });

  // After all files are loaded, expose a plain Npm that delegates
  // straight to the ambient require.
  global.Npm = {
    /**
     * @summary Require a package that was specified using
     * `Npm.depends()`.
     * @param {String} name The name of the package to require.
     * @locus Server
     * @memberOf Npm
     */
    require: function (name) {
      return require(name);
    }
  };

  // Global asset reader: unlike the per-file one above, this resolves
  // assets under the fixed 'assets/app' directory.
  var getAsset = function (assetPath, encoding, callback) {
    var fut;
    if (!callback) {
      fut = new Future();
      callback = fut.resolver();
    }
    // This assumes that we've already loaded the meteor package, so meteor
    // itself can't call Assets.get*. (We could change this function so that
    // it doesn't call bindEnvironment if you don't pass a callback if we
    // need to.)
    var _callback = Package.meteor.Meteor.bindEnvironment(function (err, result) {
      if (result && !encoding)
        // Sadly, this copies in Node 0.10.
        result = new Uint8Array(result);
      callback(err, result);
    }, function (e) {
      console.log("Exception in callback of getAsset", e.stack);
    });

    // Convert a DOS-style path to Unix-style in case the application code
    // was written on Windows.
    assetPath = files.convertToStandardPath(assetPath);
    var filePath = path.join(serverDir, 'assets/app', assetPath);
    // NOTE(review): fs.readFile reports missing files through its callback
    // rather than by throwing, so this catch only covers synchronous
    // argument errors — the "Unknown asset" path may rarely trigger.
    try {
      fs.readFile(files.convertToOSPath(filePath), encoding, _callback);
    } catch (error) {
      _callback(new Error("Unknown asset: " + assetPath));
    }
    if (fut)
      return fut.wait();
  };

  global.Assets = {
    getText: function (assetPath, callback) {
      return getAsset(assetPath, "utf8", callback);
    },
    getBinary: function (assetPath, callback) {
      return getAsset(assetPath, undefined, callback);
    }
  };

  wallaby.start();
}).run();