Example #1
test('test github repos that use `standard`', function (t) {
  t.plan(Object.keys(MODULES).length)

  mkdirp.sync(TMP)

  // test an empty repo
  mkdirp.sync(path.join(TMP, 'empty'))

  parallelLimit(Object.keys(MODULES).map(function (name) {
    var url = MODULES[name]
    var folder = path.join(TMP, name)
    return function (cb) {
      fsAccess(path.join(TMP, name), fs.R_OK | fs.W_OK, function (err) {
        var args = err
          ? [ 'clone', '--depth', 1, url, path.join(TMP, name) ]
          : [ 'pull' ]
        var opts = err
          ? {}
          : { cwd: folder }
        spawn(GIT, args, opts, function (err) {
          if (err) return cb(err)

          spawn(STANDARD, [], { cwd: folder }, function (err) {
            t.error(err, name)
            cb(null)
          })
        })
      })
    }
  }), PARALLEL_LIMIT, function (err) {
    if (err) throw err
  })
})
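All of the snippets on this page share one contract: `parallelLimit(tasks, limit, cb)` takes an array of task functions, starts at most `limit` of them at a time, and invokes `cb(err, results)` once, with results in task order. A minimal, self-contained sketch of that contract, assuming the `run-parallel-limit` package (which these snippets import under various local names such as `parallelLimit`, `parallel`, and `runParallelLimit`):

var parallelLimit = require('run-parallel-limit')

// Each task is a function that accepts a Node-style callback.
var tasks = [1, 2, 3, 4, 5].map(function (n) {
  return function (cb) {
    setTimeout(function () {
      cb(null, n * n) // (error, result)
    }, 10)
  }
})

// Run at most 2 tasks at a time; results keep the task order.
parallelLimit(tasks, 2, function (err, results) {
  if (err) throw err
  console.log(results) // [1, 4, 9, 16, 25]
})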
Example #2
 var results = await new Promise((resolve, reject) => {
     parallelLimit(filez2.map(entry => {
         return function(cb) {
             exports.renderDocument(
                 entry.result.config,
                 entry.result.basedir,
                 entry.result.fpath,
                 entry.result.renderTo,
                 entry.result.renderToPlus,
                 entry.result.renderBaseMetadata
             )
             .then((result) => {
                 // log(`render renderDocument ${result}`);
                 cb(undefined, { result });
             })
             .catch(err => {
                 // console.error(`render renderDocument ${err} ${err.stack}`);
                 cb(undefined, { error: err });
             });
         };
     }), 
     config.concurrency, // Concurrency count
     function(err, results) {
         // gets here on final results
         if (err) reject(err);
         else resolve(results);
     });
 });
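Note the pattern in Example #2: each task catches its own rejection and reports it as a value (`cb(undefined, { error: err })`) rather than as an error, so one failed render cannot abort the rest of the batch. A generic version of that wrapper might look like this (the `settle` name is ours, not from the source):

// Wrap a promise-returning function as a task that never fails the batch:
// the task resolves to { result } on success or { error } on rejection.
function settle (promiseFactory) {
  return function (cb) {
    promiseFactory()
      .then(function (result) { cb(null, { result }) })
      .catch(function (error) { cb(null, { error }) })
  }
}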
Example #3
Torrent.prototype._verifyPieces = function () {
  var self = this
  parallelLimit(self.pieces.map(function (_, index) {
    return function (cb) {
      if (self.destroyed) return cb(new Error('torrent is destroyed'))
      self.store.get(index, function (err, buf) {
        if (err) return process.nextTick(cb, null) // ignore error
        sha1(buf, function (hash) {
          if (hash === self._hashes[index]) {
            if (!self.pieces[index]) return cb(null) // already verified; don't stall parallelLimit
            self._debug('piece verified %s', index)
            self._markVerified(index)
          } else {
            self._debug('piece invalid %s', index)
          }
          cb(null)
        })
      })
    }
  }), FILESYSTEM_CONCURRENCY, function (err) {
    if (err) return self._destroy(err)
    self._debug('done verifying')
    self._onStore()
  })
}
Example #4
 return function status () {
   var c = get('config')
   if (get('args').length) {
     throw new Error('unknown arg')
   }
   var ret = {
     thoughts: 0,
     ticks: 0,
     logs: 0
   }
   var tasks = []
   Object.keys(ret).forEach(function (k) {
     tasks.push(function (done) {
       get('db').collection(k).count({app_name: get('app_name')}, function (err, result) {
         if (err) return done(err)
         ret[k] = result
         done()
       })
     })
   })
   parallel(tasks, c.parallel_limit, function (err) {
     if (err) throw err
     get('logger').info('status', ret, {feed: 'status'})
     get('app').close(function () {
       process.exit()
     })
   })
 }
Example #5
test('test github repos that use `standard`', function (t) {
  t.plan(testPackages.length)

  mkdirp.sync(TMP)

  parallelLimit(testPackages.map(function (pkg) {
    var name = pkg.name
    var url = pkg.repo + '.git'
    var folder = path.join(TMP, name)
    return function (cb) {
      access(path.join(TMP, name), fs.R_OK | fs.W_OK, function (err) {
        if (argv.offline) {
          if (err) {
            t.pass('SKIPPING (offline): ' + name + ' (' + pkg.repo + ')')
            return cb(null)
          }
          runStandard(cb)
        } else {
          downloadPackage(function (err) {
            if (err) return cb(err)
            runStandard(cb)
          })
        }

        function downloadPackage (cb) {
          if (err) gitClone(cb)
          else gitPull(cb)
        }

        function gitClone (cb) {
          var args = [ 'clone', '--depth', 1, url, path.join(TMP, name) ]
          spawn(GIT, args, { stdio: 'ignore' }, function (err) {
            if (err) err.message += ' (git clone) (' + name + ')'
            cb(err)
          })
        }

        function gitPull (cb) {
          var args = [ 'pull' ]
          spawn(GIT, args, { cwd: folder, stdio: 'ignore' }, function (err) {
            if (err) err.message += ' (git pull) (' + name + ')'
            cb(err)
          })
        }

        function runStandard (cb) {
          var args = [ '--verbose' ]
          if (pkg.args) args.push.apply(args, pkg.args)
          spawn(STANDARD, args, { cwd: folder }, function (err) {
            var str = name + ' (' + pkg.repo + ')'
            if (err) { t.fail(str) } else { t.pass(str) }
            cb(null)
          })
        }
      })
    }
  }), PARALLEL_LIMIT, function (err) {
    if (err) throw err
  })
})
Example #6
    parallelLimit(tasks, 10, function (err, results) {
        if (err) {
            return cb(err);
        }

        opts.dev = false;

        // remove purgeExcess result
        results.pop();

        var incorrects = results.filter(function (dep) {
            return !dep.correct;
        });
        var corrects = results.filter(function (dep) {
            return dep.correct;
        });

        /* for each incorrect:
             - install it
             - remove excess
             - force install all children */
        var incorrectTasks = incorrects.map(function (incorrect) {
            var name = incorrect.name;
            var folder = path.join(nodeModules,
                name, 'node_modules');

            return series.bind(null, [
                installModule.bind(
                    null, nodeModules, incorrect, opts),
                forceInstall.bind(null, folder, incorrect, opts)
            ]);
        });
        var correctTasks = corrects.map(function (correct) {
            var name = correct.name;
            var folder = path.join(nodeModules, name,
                'node_modules');

            return forceInstall.bind(
                null, folder, correct, opts);
        });

        /* for each correct:
            - force install all children */

        var tasks = [].concat(incorrectTasks, correctTasks);

        parallelLimit(tasks, 10, cb);
    });
Example #7
Torrent.prototype.getFileModtimes = function (cb) {
  var self = this
  var ret = []
  parallelLimit(self.files.map(function (file, index) {
    return function (cb) {
      fs.stat(path.join(self.path, file.path), function (err, stat) {
        if (err && err.code !== 'ENOENT') return cb(err)
        ret[index] = stat && stat.mtime.getTime()
        cb(null)
      })
    }
  }), FILESYSTEM_CONCURRENCY, function (err) {
    self._debug('done getting file modtimes')
    cb(err, ret)
  })
}
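Example #7 collects its output by writing into the shared `ret` array by index. Since `parallelLimit` already delivers results in task order, the same shape can come from the results argument alone; a sketch of that alternative (`getModtimes` is our illustration, not the library's API):

var fs = require('fs')
var parallelLimit = require('run-parallel-limit')

// Stat each path; missing files yield undefined instead of an error.
function getModtimes (paths, cb) {
  parallelLimit(paths.map(function (p) {
    return function (cb) {
      fs.stat(p, function (err, stat) {
        if (err && err.code !== 'ENOENT') return cb(err)
        cb(null, stat && stat.mtime.getTime())
      })
    }
  }), 2, cb) // results arrive in the same order as `paths`
}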
Example #8
 glob(path.join(initDocsPath, '/**/*'), (err, res) => {
   if (err) {
     throw err
   }
   const index = __dirname.lastIndexOf('/')
   parallelLimit(res.map((element) => (callback) => {
     const addPath = element.substring(index + 1, element.length)
     if (!fs.statSync(element).isDirectory()) {
        const rs = new Readable({ read () {} }) // noop read(); all data is pushed synchronously below
       rs.push(fs.readFileSync(element))
       rs.push(null)
       const filePair = {path: addPath, content: rs}
       i.write(filePair)
     }
     callback()
   }), 10, (err) => {
     if (err) {
       throw err
     }
     i.end()
   })
 })
Example #9
// Reads telemetry log files in parallel, then produces a summary array, one
// entry for each day: [{date, actives, retention, ...}, ...]
function loadTelemetrySummary (logFiles, cb) {
  console.log('Summarizing ' + logFiles.length + ' telemetry log files')

  const tasks = logFiles.map(function (filename) {
    return function (cb) {
      // Read each telemetry log file, one per day...
      const filePath = path.join(TELEMETRY_PATH, filename)
      console.log('Reading ' + filename)
      fs.readFile(filePath, 'utf8', function (err, json) {
        if (err) return cb(err)

        // Each log file contains one JSON record per line
        console.log('Parsing ' + filename)

        const lines = json.trim().split('\n')
        const records = lines
          .map(function (line, i) {
            try {
              return JSON.parse(line)
            } catch (err) {
              console.error('Skipping invalid line %s:%d', filename, i + 1)
              console.error(err)
              return null
            }
          })
          .filter(Boolean)

        console.log('Read ' + records.length + ' rows from ' + filename)
        cb(null, summarizeDailyTelemetryLog(filename, records))
      })
    }
  })

  runParallelLimit(tasks, PARALLEL_LIMIT, function (err, days) {
    if (err) return cb(err)
    cb(null, combineDailyTelemetrySummaries(days))
  })
}
Example #10
    Machine.command(args, (err, stdout) => {
      if (err) return done(err)

      const machines = stdout.split(NEWLINE).filter(Boolean).map(line => {
        const values = line.split(LIST_COLUMNS_SEP)
        const machine = {}

        LIST_COLUMNS.forEach((name, i) => {
          const key = camelCase(name)
          const val = values[i]

          machine[key] = val === '' ? null : decodeURIComponent(val)
        })

        // ResponseTime is in nanoseconds
        machine.responseTime = parseInt(machine.responseTime, 10) / 1e6
        machine.state = machine.state.toLowerCase()
        machine.activeHost = machine.activeHost === 'true'
        machine.activeSwarm = machine.activeSwarm === 'true'

        if (machine.dockerVersion === 'Unknown') {
          machine.dockerVersion = null
        }

        return machine
      })

      if (!opts.inspect) return done(null, machines)

      // Add additional metadata from `docker-machine inspect <name>`
      parallel(machines.map(machine => next => {
        Machine.inspect(machine.name, (err, data) => {
          if (err) next(err)
          else next(null, xtend(machine, data))
        })
      }), 4, done)
    })
Example #11
test('test github repos that use `standard`', function (t) {
  t.plan(testPackages.length)

  mkdirp.sync(TMP)

  // test an empty repo
  mkdirp.sync(path.join(TMP, 'empty'))

  parallelLimit(testPackages.map(function (pkg) {
    var name = pkg.name
    var url = pkg.repo + '.git'
    var folder = path.join(TMP, name)
    return function (cb) {
      fsAccess(path.join(TMP, name), fs.R_OK | fs.W_OK, function (err) {
        var gitArgs = err
          ? [ 'clone', '--depth', 1, url, path.join(TMP, name) ]
          : [ 'pull' ]
        var gitOpts = err
          ? {}
          : { cwd: folder }
        spawn(GIT, gitArgs, gitOpts, function (err) {
          if (err) {
            err.message += ' (' + name + ')'
            return cb(err)
          }

          spawn(STANDARD, [ '--verbose' ], { cwd: folder }, function (err) {
            t.error(err, name)
            cb(null)
          })
        })
      })
    }
  }), PARALLEL_LIMIT, function (err) {
    if (err) throw err
  })
})
Example #12
 glob(path.join(initDocsPath, '/**/*'), function (err, res) {
   if (err) {
     throw err;
   }
   var index = __dirname.lastIndexOf('/');
   parallelLimit(res.map(function (element) {
     return function (callback) {
       var addPath = element.substring(index + 1, element.length);
       if (!fs.statSync(element).isDirectory()) {
          var rs = new Readable({ read: function () {} }); // noop read(); all data is pushed synchronously below
         rs.push(fs.readFileSync(element));
         rs.push(null);
         var filePair = { path: addPath, content: rs };
         i.write(filePair);
       }
       callback();
     };
   }), 10, function (err) {
     if (err) {
       throw err;
     }
     i.end();
   });
 });
Example #13
 utils.getIPFS((err, ipfs) => {
   if (err) {
     throw err
   }
   const i = ipfs.files.add()
   var filePair
   i.on('data', (file) => {
     console.log('added', bs58.encode(file.multihash).toString(), file.path)
   })
   i.once('end', () => {
     return
   })
   if (res.length !== 0) {
     const index = inPath.lastIndexOf('/')
     parallelLimit(res.map((element) => (callback) => {
       if (!fs.statSync(element).isDirectory()) {
         i.write({
           path: element.substring(index + 1, element.length),
           stream: fs.createReadStream(element)
         })
       }
       callback()
     }), 10, (err) => {
       if (err) {
         throw err
       }
       i.end()
     })
   } else {
      const rs = fs.createReadStream(inPath)
     inPath = inPath.substring(inPath.lastIndexOf('/') + 1, inPath.length)
     filePair = {path: inPath, stream: rs}
     i.write(filePair)
     i.end()
   }
 })
Example #14
parallel(tasks, PARALLEL_LIMIT, (err, results) => {
  if (err) throw err;
  console.log("\nBacktesting complete, saving results...");
  results = results.filter(function (r) {
    return !!r
  })
  results.sort((a,b) => (a.roi < b.roi) ? 1 : ((b.roi < a.roi) ? -1 : 0));
  let fileName = `backtesting_${Math.round(+new Date()/1000)}.csv`;
  let fieldsGeneral = ['roi', 'vsBuyHold', 'errorRate', 'wlRatio', 'frequency', 'endBalance', 'buyHold', 'wins', 'losses', 'period', 'min_periods', 'days'];
  let fieldNamesGeneral = ['ROI (%)', 'VS Buy Hold (%)', 'Error Rate (%)', 'Win/Loss Ratio', '# Trades/Day', 'Ending Balance ($)', 'Buy Hold ($)', '# Wins', '# Losses', 'Period', 'Min Periods', '# Days'];
  let fields = {
    cci_srsi: fieldsGeneral.concat(['cciPeriods', 'rsiPeriods', 'srsiPeriods', 'srsiK', 'srsiD', 'oversoldRsi', 'overboughtRsi', 'oversoldCci', 'overboughtCci', 'Constant', 'params']),
    srsi_macd: fieldsGeneral.concat(['rsiPeriods', 'srsiPeriods', 'srsiK', 'srsiD', 'oversoldRsi', 'overboughtRsi', 'emaShortPeriod', 'emaLongPeriod', 'signalPeriod', 'upTrendThreshold', 'downTrendThreshold', 'params']),
    macd: fieldsGeneral.concat(['emaShortPeriod', 'emaLongPeriod', 'signalPeriod', 'upTrendThreshold', 'downTrendThreshold', 'overboughtRsiPeriods', 'overboughtRsi', 'params']),
    rsi: fieldsGeneral.concat(['rsiPeriods', 'oversoldRsi', 'overboughtRsi', 'rsiRecover', 'rsiDrop', 'rsiDivsor', 'params']),
    sar: fieldsGeneral.concat(['sarAf', 'sarMaxAf', 'params']),
    speed: fieldsGeneral.concat(['baselinePeriods', 'triggerFactor', 'params']),
    trend_ema: fieldsGeneral.concat(['trendEma', 'neutralRate', 'oversoldRsiPeriods', 'oversoldRsi', 'params'])
  };
  let fieldNames = {
    cci_srsi: fieldNamesGeneral.concat(['CCI Periods', 'RSI Periods', 'SRSI Periods', 'SRSI K', 'SRSI D', 'Oversold RSI', 'Overbought RSI', 'Oversold CCI', 'Overbought CCI', 'Constant', 'Full Parameters']),
    srsi_macd: fieldNamesGeneral.concat(['RSI Periods', 'SRSI Periods', 'SRSI K', 'SRSI D', 'Oversold RSI', 'Overbought RSI', 'EMA Short Period', 'EMA Long Period', 'Signal Period', 'Up Trend Threshold', 'Down Trend Threshold', 'Full Parameters']),
    macd: fieldNamesGeneral.concat(['EMA Short Period', 'EMA Long Period', 'Signal Period', 'Up Trend Threshold', 'Down Trend Threshold', 'Overbought Rsi Periods', 'Overbought Rsi', 'Full Parameters']),
    rsi: fieldNamesGeneral.concat(['RSI Periods', 'Oversold RSI', 'Overbought RSI', 'RSI Recover', 'RSI Drop', 'RSI Divisor', 'Full Parameters']),
    sar: fieldNamesGeneral.concat(['SAR AF', 'SAR MAX AF', 'Full Parameters']),
    speed: fieldNamesGeneral.concat(['Baseline Periods', 'Trigger Factor', 'Full Parameters']),
    trend_ema: fieldNamesGeneral.concat(['Trend EMA', 'Neutral Rate', 'Oversold RSI Periods', 'Oversold RSI', 'Full Parameters'])
  };
  let csv = json2csv({
    data: results,
    fields: fields[strategyName],
    fieldNames: fieldNames[strategyName]
  });

  fs.writeFile(fileName, csv, err => {
    if (err) throw err;
    console.log(`\nResults successfully saved to ${fileName}!\n`);
  });
});
Example #15
function forceInstall(nodeModules, shrinkwrap, opts, cb) {
    if (typeof opts === 'function') {
        cb = opts;
        opts = {};
    }

    // if no dependencies object then terminate recursion
    if (shrinkwrap.name && !shrinkwrap.dependencies) {
        return purgeExcess(nodeModules, shrinkwrap, opts, cb);
    }

    var deps = shrinkwrap.dependencies;
    // console.log('shrinkwrap', shrinkwrap);
    var tasks = Object.keys(deps).map(function (key) {
        var dep = deps[key];
        if (!dep.name) {
            dep.name = key;
        }
        var filePath = path.join(nodeModules, key);

        return isCorrect.bind(null, filePath, dep, opts);
    });

    tasks.push(purgeExcess.bind(
        null, nodeModules, shrinkwrap, opts));

    parallelLimit(tasks, 10, function (err, results) {
        if (err) {
            return cb(err);
        }

        opts.dev = false;

        // remove purgeExcess result
        results.pop();

        var incorrects = results.filter(function (dep) {
            return !dep.correct;
        });
        var corrects = results.filter(function (dep) {
            return dep.correct;
        });

        /* for each incorrect:
             - install it
             - remove excess
             - force install all children */
        var incorrectTasks = incorrects.map(function (incorrect) {
            var name = incorrect.name;
            var folder = path.join(nodeModules,
                name, 'node_modules');

            return series.bind(null, [
                installModule.bind(
                    null, nodeModules, incorrect, opts),
                forceInstall.bind(null, folder, incorrect, opts)
            ]);
        });
        var correctTasks = corrects.map(function (correct) {
            var name = correct.name;
            var folder = path.join(nodeModules, name,
                'node_modules');

            return forceInstall.bind(
                null, folder, correct, opts);
        });

        /* for each correct:
            - force install all children */

        var tasks = [].concat(incorrectTasks, correctTasks);

        parallelLimit(tasks, 10, cb);
    });
}
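Examples #6 and #15 build their task arrays with `Function.prototype.bind` instead of closures: binding every argument of a Node-style async function except the trailing callback leaves a function of the callback alone, which is exactly the task shape `parallelLimit` consumes. A minimal sketch of the same idea (the file names are placeholders):

var fs = require('fs')
var parallelLimit = require('run-parallel-limit')

// fs.stat has the Node-style signature (path, cb); binding the path
// yields a task of the form (cb) => fs.stat(path, cb).
var tasks = ['a.txt', 'b.txt'].map(function (file) {
  return fs.stat.bind(fs, file)
})

parallelLimit(tasks, 2, function (err, stats) {
  if (err) throw err
  console.log(stats.map(function (s) { return s.size }))
})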
Example #16
let simulateGeneration = () => {
  console.log(`\n\n=== Simulating generation ${++generationCount} ===\n`)

  let days = argv.days
  if (!days) {
    if (argv.start) {
      var start = moment(argv.start, 'YYYYMMDDhhmm')
      days = Math.max(1, moment().diff(start, 'days'))
    }
    else {
      var end = moment(argv.end, 'YYYYMMDDhhmm')
      days = moment().diff(end, 'days') + 1
    }
  }
  runUpdate(days, argv.selector)

  iterationCount = 1
  let tasks = selectedStrategies.map(v => pools[v]['pool'].population().map(phenotype => {
    return cb => {
      runCommand(v, phenotype, cb)
    }
  })).reduce((a, b) => a.concat(b))

  parallel(tasks, PARALLEL_LIMIT, (err, results) => {
    if (err) throw err
    console.log('\nGeneration complete, saving results...')
    results = results.filter(function(r) {
      return !!r
    })

    results.sort((a, b) => (a.fitness < b.fitness) ? 1 : ((b.fitness < a.fitness) ? -1 : 0))

    let fieldsGeneral = ['selector.normalized', 'fitness', 'vsBuyHold', 'wlRatio', 'frequency', 'strategy', 'order_type', 'endBalance', 'buyHold', 'wins', 'losses', 'period_length', 'min_periods', 'days', 'params']
    let fieldNamesGeneral = ['Selector', 'Fitness', 'VS Buy Hold (%)', 'Win/Loss Ratio', '# Trades/Day', 'Strategy', 'Order Type', 'Ending Balance ($)', 'Buy Hold ($)', '# Wins', '# Losses', 'Period', 'Min Periods', '# Days', 'Full Parameters']

    let dataCSV = json2csv({
      data: results,
      fields: fieldsGeneral,
      fieldNames: fieldNamesGeneral
    })

    let fileDate = Math.round(+new Date() / 1000)
    let csvFileName = `simulations/backtesting_${fileDate}.csv`

    let poolData = {}
    selectedStrategies.forEach(function(v) {
      poolData[v] = pools[v]['pool'].population()
    })

    let jsonFileName = `simulations/generation_data_${fileDate}_gen_${generationCount}.json`
    let dataJSON = JSON.stringify(poolData, null, 2)
    var filesSaved = 0
    saveGenerationData(csvFileName, jsonFileName, dataCSV, dataJSON, (id)=>{
      filesSaved++
      if(filesSaved == 2){
        console.log('\n\nGeneration\'s Best Results')
        selectedStrategies.forEach((v)=> {
          let best = pools[v]['pool'].best()

          if(best.sim){
            console.log(`\t(${v}) Sim Fitness ${best.sim.fitness}, VS Buy and Hold: ${best.sim.vsBuyHold} End Balance: ${best.sim.endBalance}, Wins/Losses ${best.sim.wins}/${best.sim.losses}.`)

          } else {
            console.log(`\t(${v}) Result Fitness ${results[0].fitness}, VS Buy and Hold: ${results[0].vsBuyHold}, End Balance: ${results[0].endBalance}, Wins/Losses ${results[0].wins}/${results[0].losses}.`)
          }

          // prepare command snippet from top result for this strat
          let prefix = './zenbot.sh sim '
          let bestCommand = generateCommandParams(results[0])

          bestCommand = prefix + bestCommand
          bestCommand = bestCommand + ' --asset_capital=' + argv.asset_capital + ' --currency_capital=' + argv.currency_capital

          console.log(bestCommand + '\n')

          pools[v]['pool'].evolve() // advance this strategy's pool to the next generation
        })

        simulateGeneration()
      }
    })

  })
}
Example #17
Torrent.prototype._onMetadata = function (metadata) {
  var self = this
  if (self.metadata || self.destroyed) return
  debug('got metadata')

  var parsedTorrent
  if (metadata && metadata.infoHash) {
    // `metadata` is a parsed torrent (from parse-torrent module)
    parsedTorrent = metadata
  } else {
    try {
      parsedTorrent = parseTorrent(metadata)
    } catch (err) {
      return self._onError(err)
    }
  }

  self._processParsedTorrent(parsedTorrent)
  self.metadata = self.torrentFile

  // pass full torrent metadata to discovery module
  self.discovery.setTorrent(self)

  // add web seed urls (BEP19)
  if (self.urlList) self.urlList.forEach(self.addWebSeed.bind(self))

  self.rarityMap = new RarityMap(self.swarm, self.pieces.length)

  self.store = new ImmediateChunkStore(
    new self._store(self.pieceLength, {
      files: self.files.map(function (file) {
        return {
          path: path.join(self.path, file.path),
          length: file.length,
          offset: file.offset
        }
      }),
      length: self.length
    })
  )

  self.files = self.files.map(function (file) {
    return new File(self, file)
  })

  self._hashes = self.pieces

  self.pieces = self.pieces.map(function (hash, i) {
    var pieceLength = (i === self.pieces.length - 1)
      ? self.lastPieceLength
      : self.pieceLength
    return new Piece(pieceLength)
  })

  self._reservations = self.pieces.map(function () {
    return []
  })

  self.bitfield = new BitField(self.pieces.length)

  self.swarm.wires.forEach(function (wire) {
    // If we didn't have the metadata at the time ut_metadata was initialized for this
    // wire, we still want to make it available to the peer in case they request it.
    if (wire.ut_metadata) wire.ut_metadata.setMetadata(self.metadata)

    self._onWireWithMetadata(wire)
  })

  debug('verifying existing torrent data')
  parallelLimit(self.pieces.map(function (piece, index) {
    return function (cb) {
      self.store.get(index, function (err, buf) {
        if (err) return cb(null) // ignore error
        sha1(buf, function (hash) {
          if (hash === self._hashes[index]) {
            if (!self.pieces[index]) return cb(null) // already verified; don't stall parallelLimit
            debug('piece verified %s', index)
            self.pieces[index] = null
            self._reservations[index] = null
            self.bitfield.set(index, true)
          } else {
            debug('piece invalid %s', index)
          }
          cb(null)
        })
      })
    }
  }), cpus().length, function (err) {
    if (err) return self._onError(err)
    debug('done verifying')
    self._onStore()
  })

  self.emit('metadata')
}
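A closing observation on the limit argument: these snippets size it to the bottleneck, not to the task count. Examples #3 and #7 cap I/O-bound work with a small fixed constant (`FILESYSTEM_CONCURRENCY`), while Example #17 sizes the hash-heavy verification by core count. A sketch of that heuristic (the constant's value is our assumption, not taken from these sources):

var os = require('os')

// I/O-bound tasks (stat, read): a small fixed cap avoids exhausting
// file descriptors without over-queueing the disk.
var FILESYSTEM_CONCURRENCY = 2 // assumed value; tune per workload

// CPU-bound tasks (hashing): one in-flight task per core is a common default.
var HASH_CONCURRENCY = os.cpus().length

// e.g. parallelLimit(ioTasks, FILESYSTEM_CONCURRENCY, done)
//      parallelLimit(hashTasks, HASH_CONCURRENCY, done)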