Beispiel #1
0
// Through-stream factory that splits an incoming byte stream on NEWLINE_CHR.
// Bytes after the last newline of a chunk are held in `remainder` until more
// data arrives (or the stream ends), so lines never straddle two emissions.
const mkBufferSplit = () => {
    let remainder = null;
    return Pull((read) => (abort, cb) => {
        read(abort, (end, data) => {
            if (end) {
                // NOTE(review): data arriving together with `end` is unexpected.
                if (data) { console.log("mkBufferSplit() Data at the end"); }
                // Flush whatever partial line is still buffered.
                cb(end, remainder ? [remainder, data] : [data]);
                remainder = null;
                return;
            }
            const lines = [];
            let nl = data.indexOf(NEWLINE_CHR);
            while (nl >= 0) {
                let line = data.slice(0, nl);
                if (remainder) {
                    // Prepend the partial line carried over from the last chunk.
                    line = Buffer.concat([remainder, line]);
                    remainder = null;
                }
                lines.push(line);
                data = data.slice(nl + 1);
                nl = data.indexOf(NEWLINE_CHR);
            }
            // Whatever is left has no newline yet; carry it to the next read.
            remainder = remainder ? Buffer.concat([remainder, data]) : data;
            cb(end, lines);
        });
    }, Pull.flatten());
};
Beispiel #2
0
module.exports = function addDefaultAssets (self, log, callback) {
  const initDocsPath = path.join(__dirname, '../../init-files/init-docs')
  const index = initDocsPath.lastIndexOf(path.sep)

  pull(
    pull.values([initDocsPath]),
    pull.asyncMap((val, cb) =>
      glob(path.join(val, '/**/*'), { nodir: true }, cb)
    ),
    pull.flatten(),
    pull.map(element => {
      const addPath = element.substring(index + 1)
      return { path: addPath, content: file(element) }
    }),
    self.addPullStream(),
    pull.through(file => {
      if (file.path === 'init-docs') {
        const cid = new CID(file.hash)
        log('to get started, enter:\n')
        log(`\tjsipfs cat /ipfs/${cid.toBaseEncodedString()}/readme\n`)
      }
    }),
    pull.collect((err) => {
      if (err) {
        return callback(err)
      }

      callback(null, true)
    })
  )
}
Beispiel #3
0
 // Builds the pull-stream pipeline used to add content:
 // normalize input -> flatten -> run the importer -> prepare result entries.
 const createAddPullStream = () => pull(
   pull.map(normalizeContent),
   pull.flatten(),
   importer(self._dagS),
   pull.asyncMap(prepareFile.bind(null, self))
 )
Beispiel #4
0
 tags: function (opts) {
   return pull(
     sbot.query.read({query: [
       {$filter: {value: {content: {
         type: "curation",
         curate: {$prefix: ''}
       }}}},
       {$map: ['value', 'content', 'tags']}
     ]}),
     pull.flatten(),
     pull.map(function (e) {
       return e.split(/[ ,]+/)
     }),
     pull.flatten(),
     pull.filter(v.isTag),
     pull.unique()
   )
 },
Beispiel #5
0
 // Rebuilds the index database by replaying every op from `db` through
 // createIndex and writing all resulting changes to indexDb; cb is invoked
 // by pl.write when the rebuild completes (or fails).
 indexDb.rebuild = function (cb) {
   pull(
     pl.read(db),
     pull.map(function (op) {
       // Collect every index change createIndex derives from this op.
       var changes = []
       createIndex(op, function (change) {
         changes.push(change)
       })
       return changes
     }),
     pull.flatten(),
     pl.write(indexDb, cb)
   )
 }
Beispiel #6
0
// Returns a through stream which injects index docs corresponding to each doc
// in the input stream
// Returns a through stream which injects index docs corresponding to each doc
// in the input stream. For every incoming doc it emits one index doc per
// index definition, then the original doc itself tagged as a 'put'.
function addIndexDocs (index_defs) {
  // The tc guard guarantees index_defs is an array of index definitions.
  if (!tc('[String|[String]]', index_defs)) {
    throw new Error('index_defs is not supposed to be ' + index_defs)
  }

  return pull(
    pull.map(function (doc) {
      var batch = index_defs.map(function (def) {
        return makeIndexDoc(doc, def)
      })

      // NOTE: mutates the incoming doc so downstream sees it as a put op.
      doc.type = 'put'
      batch.push(doc)
      return batch
    }),
    pull.flatten()
  )
}
Beispiel #7
0
// Streams baseMsg followed by every fork of it, recursively: any message
// linking to baseMsg with rel 'upstream' is a fork, and its own forks are
// included too. eachFork(forkMsg, parentMsg), if given, is called per fork.
u.getForks = function (sbot, baseMsg, eachFork) {
  var cat = require('pull-cat')
  var pull = require('pull-stream')
  var forks = pull(
    sbot.links({
      dest: baseMsg.key,
      rel: 'upstream',
      values: true
    }),
    pull.map(function (forkMsg) {
      if (eachFork) eachFork(forkMsg, baseMsg)
      // Recurse so forks-of-forks are streamed as well.
      return u.getForks(sbot, forkMsg, eachFork)
    }),
    pull.flatten()
  )
  return cat([pull.once(baseMsg), forks])
}
Beispiel #8
0
// Streams git-update messages for a repo, newest first.
function getRepoUpdates (sbot, repoMsg, includeMerged) {
  // includeMerged: include updates pushed to downstream (fork) repos
  // which are merged into the upstream

  // sha1 -> true for every commit seen in an upstream update so far.
  var commitsInUpstream = {}
  function gotUpstreamCommit (commit) {
    commitsInUpstream[commit.sha1] = true
  }
  function isCommitInUpstream (commit) {
    return commit && commitsInUpstream[commit.sha1]
  }

  // Either just the upstream repo message, or it plus all of its forks.
  var repos = includeMerged ? u.getForks(sbot, repoMsg) : pull.once(repoMsg)

  return pull(
    repos,
    // Fan out to each repo's 'repo' links (its update messages), newest first.
    pull.map(function (msg) {
      return sbot.links({
        dest: msg.key,
        rel: 'repo',
        values: true,
        reverse: true,
      })
    }),
    pull.flatten(),
    pull.filter(function (msg) {
      var c = msg.value.content
      if (c.type !== 'git-update') return false
      if (!includeMerged) return true
      var commits = Array.isArray(c.commits) ? c.commits : []
      // upstream messages come first
      if (c.repo !== repoMsg.key) {
        // update to a fork. only include if it was later merged upstream.
        return commits.some(isCommitInUpstream)
      }
      // keep track of commits in upstream
      commits.forEach(gotUpstreamCommit)
      return true
    })
  )
}
Beispiel #9
0
    // Continuation: replays the whole log through the message indexer,
    // persists the resulting ops to indexDB, then records the schema major
    // version in sysDB. Any failure short-circuits to cb(err).
    function next (err) {
      if (err) return cb(err)

      // replay the log
      pull(
        db.createLogStream({ keys: true, values: true }),
        pull.map(function (msg) {
          // Collect the index ops this message produces.
          var ops = []
          indexMsg(function (item) { ops.push(item) }, msg.timestamp, msg.key, msg.value)
          return ops
        }),
        pull.flatten(),
        pl.write(indexDB, writeDone)
      )

      // Hoisted declaration: referenced above before its textual position.
      function writeDone (err) {
        if (err) return cb(err)

        sysDB.put('vmajor', getVMajor(), cb)
      }
    }