/**
 * Constructor.
 *
 * Merges the passed options with the module-level defaults, then builds the
 * `meta` descriptor (urls, output file paths, emitter) used by the instance.
 *
 * @param {Object} config user-supplied options, merged via Config.get.
 */
function FavUsers(config) {
    config = Config.get(module.id, config);

    var userId = config.userId;

    // Values interpolated into the configured html/json file path templates.
    var formatData = {
        'userId': userId,
        'dir': Directory.root().slice(0, -1),
    };

    // Bump the page size so the whole list comes back in a single request.
    var url = Url.addQueryString(config.host + config.url, {
        'size': 100000,
    });

    this.meta = {
        'userId': userId,
        'url': url,
        'host': config.host,
        'cache': config.cache,
        'html': {
            'file': $.String.format(config.html.file, formatData),
            'write': config.html.write,
        },
        'json': {
            'file': $.String.format(config.json.file, formatData),
            'write': config.json.write,
        },
        'emitter': new Emitter(this),
    };
}
/**
 * Reads the cassandra section of the app configuration and returns only the
 * connection-relevant options.
 *
 * @returns {Object} { keyspace, replication, dseUsername, dsePassword }
 */
function getCassandraConfig() {
  const cassandra = config.get('cassandra');
  return {
    keyspace: cassandra.keyspace,
    replication: cassandra.replication,
    dseUsername: cassandra.dseUsername,
    dsePassword: cassandra.dsePassword,
  };
}
import test from 'selenium-webdriver/testing'; import config from 'config'; import assert from 'assert'; import * as driverManager from '../lib/driver-manager.js'; import LoginFlow from '../lib/flows/login-flow.js'; import ThemesPage from '../lib/pages/themes-page.js'; import ThemePreviewPage from '../lib/pages/theme-preview-page.js'; import CustomizerPage from '../lib/pages/customizer-page.js'; import ThemeDialogComponent from '../lib/components/theme-dialog-component.js'; const mochaTimeOut = config.get( 'mochaTimeoutMS' ); const startBrowserTimeoutMS = config.get( 'startBrowserTimeoutMS' ); const explicitWait = config.get( 'explicitWaitMS' ); const screenSize = driverManager.currentScreenSize(); var driver; test.before( 'Start Browser', function() { this.timeout( startBrowserTimeoutMS ); driver = driverManager.startBrowser(); } ); test.describe( 'Themes: (' + screenSize + ')', function() { this.timeout( mochaTimeOut ); this.bailSuite( true ); test.describe( 'Switching Themes:', function() {
/**
 * Renders the SPARQL editor page.
 *
 * Builds the Flint editor configuration (namespaces derived from the
 * configured prefix table, endpoints, output formats and editor modes) and
 * sends the rendered template.
 *
 * NOTE(review): relies on `endpoints`, `res`, `pug`, `extend` and
 * `defaultLocals` being in scope elsewhere in this module — confirm.
 * The original also declared three large, never-referenced sample-query
 * locals (`sampleQuery1..3`); they were dead code and have been removed.
 */
function renderPage() {
    // Map the configured prefix table into Flint's namespace descriptors.
    var prefixes = config.get('prefixes');
    var namespaces = Object.keys(prefixes).map((prefixName) => {
        return { name: prefixName, prefix: prefixName, uri: prefixes[prefixName] };
    });

    var flintConfig = {
        namespaces: namespaces,
        endpoints: endpoints,
        interface: { toolbar: true, menu: true },
        "defaultEndpointParameters": {
            "queryParameters": {
                "format": "output",
                "query": "query",
                "update": "update"
            },
            "selectFormats": [
                { "name": "SPARQL-XML", "format": "sparql", "type": "application/sparql-results+xml" },
                { "name": "JSON", "format": "json", "type": "application/sparql-results+json" }
            ],
            "constructFormats": [
                { "name": "RDF/XML", "format": "rdfxml", "type": "application/rdf+xml" },
                { "name": "Turtle", "format": "turtle", "type": "application/turtle" }
            ]
        },
        defaultModes: [{ "name": "SPARQL 1.1 Query", "mode": "sparql11query" }]
    };

    res.send(pug.renderFile('template/sparql.jade', extend(defaultLocals(), {
        currentPage: 'sparql',
        flintConfig: flintConfig
    })));
}
var express = require('express'); var router = express.Router(); var http = require('request-promise-json'); var Promise = require('promise'); var UrlPattern = require('url-pattern'); var oauth = require('../server/js/oauth.js'); var config = require('config'); var session; var api_url = new UrlPattern('(:protocol)\\://(:host)(/:org)(/:cat)(:api)/(:operation)'); var _myApp = config.get('Application'); var _apiServer = config.get('API-Server'); var _apiServerOrg = ((_apiServer.org == "") || (typeof _apiServer.org == 'undefined')) ? undefined : _apiServer.org; var _apiServerCatalog = ((_apiServer.catalog == "") || (typeof _apiServer.catalog == 'undefined')) ? undefined : _apiServer.catalog; var _apis = config.get('APIs'); /* Handle the request for calculating shipping cost */ router.get('/shipping/:zip', function (req, res) { session = req.session; setShipCalcOptions(req, res) .then(submitCalcShipReq) .catch(renderErrorPage) .done(); }); /* Handle the request for finding nearest store */ router.get('/stores/:zip', function (req, res) { session = req.session;
/** * Event handlers for phase product create, update and delete. * Current functionality just updates the elasticsearch indexes. */ import config from 'config'; import _ from 'lodash'; import Promise from 'bluebird'; import util from '../../util'; const ES_PROJECT_INDEX = config.get('elasticsearchConfig.indexName'); const ES_PROJECT_TYPE = config.get('elasticsearchConfig.docType'); const eClient = util.getElasticSearchClient(); /** * Handler for phase product creation event * @param {Object} logger logger to log along with trace id * @param {Object} msg event payload * @param {Object} channel channel to ack, nack * @returns {undefined} */ const phaseProductAddedHandler = Promise.coroutine(function* (logger, msg, channel) { // eslint-disable-line func-names try { const data = JSON.parse(msg.content.toString()); const doc = yield eClient.get({ index: ES_PROJECT_INDEX, type: ES_PROJECT_TYPE, id: data.projectId }); const phases = _.isArray(doc._source.phases) ? doc._source.phases : []; // eslint-disable-line no-underscore-dangle _.each(phases, (phase) => { if (phase.id === data.phaseId) { phase.products = _.isArray(phase.products) ? phase.products : []; // eslint-disable-line no-param-reassign
// index.js // // Handles daemonizing and clustering. In each worker process control is passed to ./lib/main.js. // // Config: { // daemon: true, // workers: 1 // } var config = require('config'); if (config.get('daemon')){ require('daemon')(); } var cluster = require('cluster'), log = require('./lib/logging'); if (config.get('cluster') && cluster.isMaster){ require('./lib/pidfile').write(); function spawn_child(){ var w = cluster.fork(); w.on('message', function(m){ // If you want to set up handling for messages from child processes, do that here. log.warn('cluster: Unhandled message from child: ' + JSON.stringify(m)); }); log.warn('cluster: spawned worker ' + w.id + ' (PID ' + w.process.pid + ')'); return w; }
var app = express(); app.set('port', process.env.PORT || 5000); app.set('view engine', 'ejs'); app.use(bodyParser.json({ verify: verifyRequestSignature })); app.use(express.static('public')); /* * Be sure to setup your config values before running this code. You can * set them using environment variables or modifying the config file in /config. * */ // App Secret can be retrieved from the App Dashboard const APP_SECRET = (process.env.MESSENGER_APP_SECRET) ? process.env.MESSENGER_APP_SECRET : config.get('appSecret'); // Arbitrary value used to validate a webhook const VALIDATION_TOKEN = (process.env.MESSENGER_VALIDATION_TOKEN) ? (process.env.MESSENGER_VALIDATION_TOKEN) : config.get('validationToken'); // Generate a page access token for your page from the App Dashboard const PAGE_ACCESS_TOKEN = (process.env.MESSENGER_PAGE_ACCESS_TOKEN) ? (process.env.MESSENGER_PAGE_ACCESS_TOKEN) : config.get('pageAccessToken'); // URL where the app is running (include protocol). Used to point to scripts and // assets located at this address. const SERVER_URL = (process.env.SERVER_URL) ? (process.env.SERVER_URL) :
}, plugins: { filename: '[name].bundle.css', }, }; } var plugins = [ new CleanWebpackPlugin(['dist'], { root: path.join(__dirname, 'public'), verbose: true, }), new webpack.DefinePlugin({ 'process.env': { NODE_ENV: JSON.stringify(config.util.getEnv('NODE_ENV')), BASE_URL: JSON.stringify(config.get('BASE_URL')), API_URL: JSON.stringify(config.get('API_URL')), }, }), new ExtractTextPlugin(constants.plugins.filename), new webpack.ProvidePlugin({ $: 'jquery', jQuery: 'jquery', }), ]; if (config.util.getEnv('NODE_ENV') == 'production') { plugins.push( new webpack.optimize.UglifyJsPlugin({ compressor: { warnings: false,
// Open the MongoDB connection once before the suite runs; the suite is
// aborted (done(error)) if the connection fails.
before(function(done) {
  mongoose.connect(config.get('db.conn'), function(error) {
    // `%s` interpolates the error into the message; the original used a bare
    // `%`, which is not a valid util.format placeholder, so the error detail
    // was never printed.
    if (error) console.error('Error while connecting:\n%s\n', error);
    done(error);
  });
});
//
// App Configuration:
// node-config resolves its configuration directory from
// process.env.NODE_CONFIG_DIR, so it must be set before require('config').
//
if (!process.env.NODE_CONFIG_DIR) {
    process.env.NODE_CONFIG_DIR = configPath;
}
console.log("Configuration Directory: %s", process.env.NODE_CONFIG_DIR);

var config = require('config');
console.log("Config Base: " + config.util.getEnv('NODE_CONFIG_DIR'));
console.log("Config File: " + config.util.getEnv('NODE_ENV'));

//
// Logger Initialization
//
var loggerConfig = config.get("logger");
log4js.configure(loggerConfig);

//
// Framework Components
//
var dbFactory = require('./framework/DBFactory');
var serviceFactory = require('./framework/ServiceFactory');
var interceptorFactory = require('./framework/InterceptorFactory');
var controllerFactory = require('./framework/ControllerFactory');

//
// Dump Environment Information
//
'use strict';

var fs = require('fs');
var path = require('path');
var mongoose = require('mongoose');
var config = require('config');

var db = {};

mongoose.connect(config.get('db.uri'));

// Import every model file in this directory, except dot-files and this
// index.js itself, keyed by the model's modelName.
fs.readdirSync(__dirname).forEach(function(file) {
    if (file.indexOf('.') === 0 || file === 'index.js') {
        return;
    }
    var model = require(path.join(__dirname, file));
    db[model.modelName] = model;
});

db.mongoose = mongoose;

module.exports = db;
/**
 * Asynchronous start-up sequence: connects to bitcoind, verifies the
 * configured network matches the daemon's, selects the storage backend,
 * loads cached headers, syncs with the chain and finally kicks off the main
 * iteration loop. Resolves or rejects the enclosing `deferred`.
 */
Q.spawn(function* initProcess() {
  try {
    self.network = networks[config.get('server.network')]
    if (_.isUndefined(self.network)) {
      throw new Error('Unknown server.network: ' + config.get('server.network'))
    }

    /** create bitcoind client and check network */
    self.bitcoindClient = new bitcoind.Client({
      host: config.get('bitcoind.host'),
      port: config.get('bitcoind.port'),
      user: config.get('bitcoind.user'),
      pass: config.get('bitcoind.password'),
      timeout: 60000
    })
    self.bitcoind = Q.nbind(self.bitcoindClient.cmd, self.bitcoindClient)

    // Guard against pointing a mainnet server at a testnet bitcoind
    // (or vice versa).
    var bitcoindInfo = (yield self.bitcoind('getinfo'))[0]
    var configNetwork = config.get('server.network')
    var configNetworkIsTestnet = configNetwork.indexOf('testnet', configNetwork.length - 7) !== -1
    if (configNetworkIsTestnet !== bitcoindInfo.testnet) {
      throw new Error('bitcoind and ewallet-server have different networks')
    }

    /** create storage */
    switch (config.get('server.storage')) {
      case 'mongo':
        var MongoStorage = require('./storage/mongo')
        self.storage = new MongoStorage()
        break

      case 'postgres':
        var PostgresStorage = require('./storage/postgres')
        self.storage = new PostgresStorage()
        break

      case 'redis':
        // Redis backend is not implemented; the require/new that followed
        // this throw in the original was unreachable and has been removed.
        throw new Error('Redis not supported now...')

      default:
        // The original passed the storage name as a second argument to
        // Error, which silently discarded it; interpolate it instead.
        throw new Error('Unknown storage: ' + config.get('server.storage'))
    }
    yield self.storage.initialize()

    /** load headers and set last block hash */
    self.chunksCache = []
    logger.verbose('Loading headers from storage...')
    var headers = yield self.storage.getAllHeaders()
    headers.forEach(self.pushHeader.bind(self))
    self.updateLastBlockHash()

    /** sync storage with bitcoind */
    self.syncStatus = {
      status: 'sync',
      progress: {
        count: self.getBlockCount(),
        total: self.getBlockCount()
      }
    }
    yield self.catchUp()
    self.syncStatus.status = 'finished'

    /** catch up new blocks and get info from mempool */
    self.mempool = {txIds: {}, spent: {}, addrs: {}, coins: {}}
    self.on('newHeight', function () {
      logger.verbose('clear mempool')
      self.mempool = {txIds: {}, spent: {}, addrs: {}, coins: {}}
    })
    process.nextTick(self.mainIteration.bind(self))

    /** done */
    logger.info('Blockchain ready, current height: %s', self.getBlockCount() - 1)
    deferred.resolve()
  } catch (error) {
    deferred.reject(error)
  }
})
'use strict';

const fs = require('fs');
const path = require('path');
const config = require('config');
const Sequelize = require('sequelize');
const log = require('../libraries/log');

// node-config objects are immutable once read, so copy the database section
// before attaching the logging function; the original mutated the config
// object in place, which node-config rejects.
const options = Object.assign({}, config.get('database'), { logging: log.db.debug });

const sequelize = new Sequelize(options.database, options.username, options.password, options);

const db = module.exports = {};

// Import every model definition in this directory (all .js files except
// this index.js), keyed by model name.
fs.readdirSync(__dirname)
  .filter((filename) => filename !== 'index.js' && filename.substr(-3) === '.js')
  .forEach((filename) => {
    const model = sequelize.import(path.join(__dirname, filename));
    db[model.name] = model;
  });

// Wire up inter-model associations once all models are registered; names
// starting with '_' are treated as private and skipped.
Object.keys(db).forEach((modelName) => {
  if (modelName.substr(0, 1) !== '_' && 'associate' in db[modelName]) {
    db[modelName].associate(db);
  }
});
const config = require('config'); const nodemon = require('nodemon'); const browserSync = require('browser-sync'); const livereload = require('livereload'); const livereloadServer = livereload.createServer(); const nodemonLogPrefix = '[\u001b[34mNodemon\u001b[39m]'; const livereloadLogPrefix = '[\u001b[34mLive Reload\u001b[39m]'; let nodemonChangedFiles = []; browserSync({ ui: { port: config.get('browserSync.uiPort'), weinre: { port: config.get('browserSync.weinrePort') } }, files: ['public/**'], server: false, proxy: config.get('web.hostname') + ':' + config.get('web.port'), port: config.get('browserSync.port'), open: false, reloadDelay: 200 }); nodemon( { watch: ['src-server/**/*', 'config/*'],
'use strict'; var config = require('config'); var request = require('co-request'); var logger = require('logger'); var co = require('co'); var mongoose = require('mongoose'); var uriMigrate = process.env.MIGRATE_URI || config.get('migrate.uri'); var mongoUri = process.env.MONGO_URI || 'mongodb://' + config.get('mongodb.host') + ':' + config.get('mongodb.port') + '/' + config.get('mongodb.database'); let User = require('models/user'); var nextCursor = null; var obtainData = function*(cursor) { let url = uriMigrate; if (cursor) { url += '?cursor=' + cursor; } logger.debug('Doing request to ', url); var response = yield request({ url: url, method: 'GET', json: true }); return response.body; }; var transformAndSaveData = function *(list){ if(list){ for(let i =0; i< list.length; i++){ let element = list[i]; let oauth = element.auth_ids[0].split(':'); if(oauth[0] === 'google#plus'){
'use strict'; const config = require('config'); const fs = require('fs'); const path = require('path'); const routePath = `${config.get('nitro.basePath')}project/routes/`; const viewDataPath = `${config.get('nitro.basePath')}project/viewData/`; const routers = []; function readRoutes(routes) { fs.readdirSync(routes).forEach((el) => { if (path.extname(el) === '.js') { routers.push(require(routes + path.basename(el, '.js'))); } }); } readRoutes(routePath); readRoutes(viewDataPath); module.exports = function (app) { routers.forEach((routedefinition) => { routedefinition(app); }); };
return BBPromise.try(function () { return bcrypt.genSaltAsync(config.get('Hoist.security.passwordStrength')); }).bind(this).then(function (salt) {
const Sequelize = require('sequelize'); const DataTypes = require('sequelize/lib/data-types'); const config = require('config'); const merge = require('../../lib/merge'); const logger = require('../../log4js/index'); require('babel-polyfill'); //////////////////////////////////////////////////////////////// // 通过 Sequelize 第三方模块连接 MYSQL 数据库 // 用于防止 SQL 注入等 /////////////////////////////////////////////////////////////// Sequelize.LONGTEXT = DataTypes.LONGTEXT = DataTypes.TEXT; if (config.get('mysql_db').config.dialect === 'mysql') { Sequelize.LONGTEXT = DataTypes.LONGTEXT = 'LONGTEXT'; } ///////////////////////////////////////////////////////// // 开始实例化 sequelize //////////////////////////////////////////////////////// const sequelize = () => { return new Sequelize( config.get('mysql_db').config.database, config.get('mysql_db').config.username, config.get('mysql_db').config.password, {
[Pre.prepareTextsForView] ] }; exports.endpoints = [ { path: "/{lang}/cartografia", method: "GET", config: internals.config.cartografia }, // IMPORTANT: this path below is now being served directly by nginx { path: '/cartografia/{anyPath*}', method: 'GET', handler: { directory: { path: Path.join(Config.get("rootDir"), "lib/web/client/cartografia") } }, config: { auth: false, } }, ];
/**
 * Fetches the user list for one account configuration and hands the
 * error-checked body off to parseResponse.
 *
 * @param {*} configId identifier of the account configuration to query.
 */
function getUserListFrom(configId) {
  // console.log('Searching:', configId);
  api.users.list(null, configId, function getUserCB(error, response, body) {
    var handledBody = helper.handleCB(error, response, body);
    parseResponse(configId, handledBody);
  });
}

////////////////////////////////////////////////////////////////////////////////

console.log('Lookup user helper tool');

program
  .version('0.0.1')
  .description('get direct links to users')
  .option('--email [email]', 'User\'s email address to look for')
  .parse(process.argv);

if (!process.argv.slice(2).length) {
  program.outputHelp();
} else {
  if (program.email) {
    var cfgList = config.get('configArr');
    // NOTE(review): this statement was corrupted in the source
    // (`'Searching for user:'******'across'`) and would not parse;
    // reconstructed as logging the searched email — confirm against history.
    console.log('Searching for user:', program.email, 'across', cfgList.length, 'accounts.');
    // Get the list of configs
    cfgList.forEach(function(configId) {
      getUserListFrom(configId);
    });
  }
}
var express = require('express'); var router = express.Router(); var params = require('parameters-middleware'); var config= require('config'); var jwt = require('jwt-simple'); var ObjectId = require('mongoose').Types.ObjectId; var moment= require('moment'); var async= require('async'); var db=require('../db/DbSchema'); var events = require('../events'); var log = require('tracer').colorConsole(config.get('log')); var apn=require('../notificationSenders/apnsender'); var gcm=require('../notificationSenders/gcmsender'); var listingsLogic=require('../logic/listings'); router.get('/companies', function(req,res,next){ listingsLogic.getList(req,res) .then(function(rows){ res.json(rows); }) .catch(function(err){ res.status(err.status).json(err.message); }) }); router.get('/bugs', function(){ listingsLogic.getBounties(req,res) .then(function(rows){
// call the packages we need
var mongoose = require('mongoose');
var mongodbUri = require('mongodb-uri');
var express = require('express');
var path = require('path');

// node-config must know where the config directory lives before
// require('config') is evaluated.
process.env.NODE_CONFIG_DIR = path.resolve(__dirname, './config');
var config = require('config');

// Full MongoDB connection URL assembled from the structured config entry.
var DBUrl = mongodbUri.format(config.get('mongodb'));

var debug = require('debug')('core:appindex');
var responseTime = require('response-time');
var favicon = require('serve-favicon');
var morgan = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var compression = require('compression');
var helmet = require('helmet');
var userAgentDevice = require('express-device');

var routesWebsite = require('./routes/website');
var routesApi = require('./routes/api');
var errorHandler = require('./express-middleware/errorhandler');

var app = express();

// compress all requests
app.use(compression());
var environment = require('execution-environment'); var PushNotificationSchema = require(path.join(__dirname, 'lib', 'schema')); var KueWorker = require(path.join(__dirname, 'lib', 'kue', 'worker')); var PushNotification; //configure execution-environment if (!environment.isLocal) { environment.registerEnvironments({ isLocal: ['test', 'dev', 'development'] }); } //obtain push configuration from node-config var _config = config.has('push') ? config.get('push') : {}; //obtain model name var modelName = (_config.model || {}).name || 'PushNotification'; // initialize mongoose push notification model try { //setup kue queue if available if (_config.kue && _.isPlainObject(_config.kue)) { //require kue var kue = require('kue');
/* eslint-disable no-continue */ import fs from 'fs'; import path from 'path'; import { assert } from 'chai'; import config from 'config'; import glob from 'glob'; import { langToLocale, localeToLang } from 'core/i18n/utils'; const langs = config.get('langs'); const basePath = config.get('basePath'); describe('Locale Config', () => { // eslint-disable-next-line no-restricted-syntax for (const lang of langs) { // eslint-disable no-loop-func it(`should have a corresponding ${lang} dir in locale`, () => fs.lstatSync(path.join(basePath, 'locale', langToLocale(lang)))); } // eslint-disable-next-line no-restricted-syntax for (const localeDir of glob.sync('locale/*')) { const locale = path.basename(localeDir); const lang = localeToLang(locale); if (locale === 'templates') { continue; } // eslint-disable no-loop-func it(`should have a corresponding ${lang} entry the locale dir in the config`, () =>
import assert from 'assert'; import webdriver from 'selenium-webdriver'; import test from 'selenium-webdriver/testing'; import config from 'config'; import WebDriverJsDemoPage from '../lib/webdriver-js-demo-page.js'; import WebDriverJsErrorPage from '../lib/webdriver-js-error-page.js'; import WebDriverJsLeavePage from '../lib/webdriver-js-leave-page.js'; let driver = null; const mochaTimeoutMS = config.get( 'mochaTimeoutMS' ); test.before( function() { this.timeout( mochaTimeoutMS ); let pref = new webdriver.logging.Preferences(); pref.setLevel( 'browser', webdriver.logging.Level.SEVERE ); driver = new webdriver.Builder().forBrowser( 'chrome' ).setLoggingPrefs( pref ).build(); } ); test.describe( 'WebDriverJsDemo', function() { this.timeout( mochaTimeoutMS ); test.it( 'can wait for an element to appear', function() { let page = new WebDriverJsDemoPage( driver, true ); page.waitForChildElementToAppear(); page.childElementPresent().then( ( present ) => { assert.equal( present, true, 'The child element is not present' ); } ); } ); test.it( 'can check for an alert when leaving the page', function() {
/*
 * Role of this service:
 * collect tasks until either
 *   1. countForSave is reached, or
 *   2. delayTime elapses,
 * and then save.
 */
var config = require('config');
var worker = require('./worker');
var configService = require('./config');
var kue = require('kue');

var tasks = kue.createQueue();

// init connection to mongo database
require('common/mongooseDb');

// Start consuming save-post jobs with the configured worker concurrency.
tasks.process(config.get("queues:tasks:savePost"), configService.workerCount, worker);
'use strict'; const bodyParser = require('body-parser'), config = require('config'), crypto = require('crypto'), express = require('express'), https = require('https'), request = require('request'); var app = express(); //facebook sdk for get user inforamtion. var sdk = require('facebook-node-sdk'); var fb = new sdk({ appId: config.get('AppId'), secret: config.get('appSecret') }).setAccessToken(config.get('pageAccessToken')); app.set('port', process.env.PORT || 5000); app.use(bodyParser.json({ verify: verifyRequestSignature })); app.use(express.static('public')); /* * Be sure to setup your config values before running this code. You can * set them using environment variables or modifying the config file in /config. * */ // App Secret can be retrieved from the App Dashboard const APP_SECRET = (process.env.MESSENGER_APP_SECRET) ?
/**
 * Service wrapping transaction persistence (LevelUp) and node RPC access.
 *
 * Both collaborators can be injected through `opts` (useful for testing);
 * when absent they are built from the application config and promisified.
 *
 * @param {Object} [opts] optional overrides: { database, rpc }
 */
function TransactionService(opts) {
  var options = _.extend({}, opts);

  this.database = options.database
    ? options.database
    : Promise.promisifyAll(new LevelUp(config.get('LevelUp')));

  this.rpc = options.rpc
    ? options.rpc
    : Promise.promisifyAll(new RPC(config.get('RPC')));
}
run : function() { var appKeys = config.get('observerKeys'); var doppelUsers = config.get('doppelUsers'); this.observeStream(twitterService.createClient(appKeys), doppelUsers); },