Example 1
  // Consume one input chunk (or finish, when called with no chunk), emitting
  // completed tokens and plain-text runs via `push`, and keeping any text that
  // might still grow into a match buffered in `inputBuffer`.
  function tokenize(chunk) {
    // A missing chunk signals that no further input will arrive.
    var isFinalCall = !chunk;
    var consumedUpTo = 0;
    var found = null;
    var gap;

    if (chunk) inputBuffer += chunk;

    while ((found = pattern.exec(inputBuffer)) !== null) {
      // Emit any plain text sitting between the previous match and this one,
      // split per line (no transform is applied to these slices).
      if (found.index !== consumedUpTo) {
        gap = inputBuffer.slice(consumedUpTo, found.index);
        var slices = leftsplit(gap, /(\r?\n)/);
        for (var i = 0; i < slices.length; i++) {
          push.call(this, slices[i]);
        }
      }

      if (isFinalCall || pattern.lastIndex < inputBuffer.length) {
        // Match ends strictly inside the buffer (or input is complete), so it
        // cannot be extended by later chunks — emit it now.
        push.call(this, found[0], found);

        // Resume scanning after the emitted match.
        consumedUpTo = pattern.lastIndex;
      } else {
        // Match touches the end of the buffer: a future chunk might extend
        // it, so keep it buffered and retry from its start next time.
        consumedUpTo = found.index;
      }
    }
    // Retain only the unconsumed tail and reset the stateful /g regex so the
    // next call scans from the start of the buffer.
    inputBuffer = inputBuffer.slice(consumedUpTo);
    pattern.lastIndex = 0;
  }
Example 2
  // Stream flush handler: perform a final tokenize pass, drain any buffered
  // text, end the output stream, and invoke the completion callback.
  function flush(cb) {
    // Calling tokenize with no chunk tells it the input is complete.
    tokenize.call(this);

    // Drain whatever remains in the internal buffer, one line-slice at a time.
    if (inputBuffer !== '') {
      const leftovers = leftsplit(inputBuffer, /(\r?\n)/);
      for (const slice of leftovers) {
        push.call(this, slice);
      }
    }

    // Signal the end of the readable side, then report completion.
    this.push(null);
    return cb();
  }
Example 3
  // Convert a separator string into per-line separator records, advancing the
  // shared `line`/`column` cursor as each slice is consumed. Each line must be
  // processed individually for correct sourcemap output.
  //
  // Idiom fix: the previous implementation mapped empty slices to a `null`
  // sentinel and stripped them afterwards with `_.without`; filtering first
  // with native Array methods avoids the sentinel round-trip and the lodash
  // dependency. Empty slices never advanced the cursor before either (the
  // original returned null before calling shiftCursor), so behavior for the
  // cursor state is unchanged.
  function toSeparators(separator) {
    return leftsplit(separator, /(\r?\n)/)
      .filter(content => Boolean(content))
      .map(content => {
        // Snapshot the cursor position *before* advancing past this slice.
        const output = { content, line, column };
        output.source = source;
        output.indent = inheritedIndent;
        output.parents = [...inheritedParents];

        // Advance the shared cursor by the characters/newlines just emitted.
        ({ line, column } = shiftCursor(content, { line, column }));

        return output;
      });
  }