Example #1
0
  return function(beatGridPath, unitOrPath) {
    const isPath = !isValidNumber(unitOrPath);

    const getUnit = function(context) {
      return isPath ? context.get(unitOrPath) : unitOrPath;
    }

    return Ember.computed(`${beatGridPath}.beatScale`, isPath ? unitOrPath : '', {
      get() {
        const unit = getUnit(this);
        const beatGrid = this.get(beatGridPath);

        return beatGrid && beatGrid[beatGridFunctionName](unit);
      },

      // TODO(TECHDEBT): this only works for timeToBeat
      set(key, beat) {
        // Ember.Logger.log(`set ${beatGridFunctionName}`, beat);
        Ember.assert('Must set `${beatGridFunctionName} to valid number', isValidNumber(beat));

        const beatGrid = this.get(beatGridPath);
        const time = beatGrid && beatGrid.beatToTime(beat);

        this.set(unitOrPath, time);

        return beat;
      },
    });
  };
Example #2
0
  // Offset of the first bar, derived from the analyzed barGridTime, bpm and
  // timeSignature. Falls back to 0 when any input is not a valid number.
  firstBarOffset: Ember.computed('barGridTime', 'bpm', 'timeSignature', function() {
    const bpm = this.get('bpm');
    const timeSignature = this.get('timeSignature');
    const secondsPerBeat = bpmToSpb(bpm);
    const secondsPerBar = secondsPerBeat * timeSignature;

    let firstBarOffsetTime = this.get('barGridTime');
    if (isValidNumber(bpm) && isValidNumber(timeSignature) && isValidNumber(firstBarOffsetTime)) {
      // normalize the grid time into the range [0, secondsPerBar)
      while ((firstBarOffsetTime - secondsPerBar) >= 0) {
        firstBarOffsetTime -= secondsPerBar;
      }

      // NOTE(review): multiplying a time in seconds by secondsPerBar yields
      // seconds^2 — this looks suspicious. Confirm whether the intent was to
      // return firstBarOffsetTime directly, or to convert it to beats/bars
      // (e.g. divide by secondsPerBeat/secondsPerBar) — TODO confirm.
      return firstBarOffsetTime * secondsPerBar;
    } else {
      return 0;
    }
  }),
Example #3
0
      return DS.PromiseObject.create({ promise });
    }
  }),

  // lazily-created per-instance cache of peak results, keyed by
  // startTime/endTime/length/audioBufferDuration (see getPeaks below)
  _peaksCache: Ember.computed(() => Ember.Map.create()),

  // returns a promise of an array of arrays of [ymin, ymax] values of the waveform
  // from startTime to endTime when broken into length subranges
  // returns a promise
  getPeaks({ startTime, endTime, length }) {
    // Ember.Logger.log('AudioBinary.getPeaks', startTime, endTime, length);

    const audioBuffer = this.get('audioBuffer');
    if (!audioBuffer) { return asResolvedPromise([]); }

    Ember.assert('Cannot AudioBinary.getPeaks without length', isValidNumber(length));
    startTime = isValidNumber(startTime) ? startTime : 0;
    endTime = isValidNumber(endTime) ? endTime : 0;

    const cacheKey = `startTime:${startTime},endTime:${endTime},length:${length},audioBufferDuration:${audioBuffer.duration}`;
    const peaksCache = this.get('_peaksCache');
    const cached = peaksCache.get(cacheKey);

    if (cached) {
      // Ember.Logger.log('AudioBinary.getPeaks cache hit', startTime, endTime, length);
      return cached;
    }

    // TODO(REFACTOR): update to use multiple channels
    const samples = audioBuffer.getChannelData(0);
    const sampleRate = audioBuffer.sampleRate;
Example #4
0
 durationText: Ember.computed('mix.duration', function() {
   const duration = this.get('mix.duration');
   return msToTime(isValidNumber(duration) ? duration : 0);
 }),
Example #5
0
  TrackMarkerMixin,
  RequireAttributes('beat'), {

  beat: null,

  // setting time proxies to setting beat
  time: Ember.computed('beatGrid.beatScale', 'beat', {
    get(key) {
      let beatGrid = this.get('beatGrid');
      return beatGrid && beatGrid.beatToTime(this.get('beat'));
    },

    set(key, time) {
      let beatGrid = this.get('beatGrid');

      Ember.assert('Can only set marker time with numeric time', isValidNumber(time));
      Ember.assert('Can only set marker time with valid beatGrid', Ember.isPresent(beatGrid));

      this.set('beat', beatGrid.timeToBeat(time));
      return time;
    }
  }),

  // setting bar proxies to setting beat
  bar: Ember.computed('beatGrid.barScale', 'beat', {
    get(key) {
      let beatGrid = this.get('beatGrid');
      return beatGrid && beatGrid.beatToBar(this.get('beat'));
    },

    set(key, bar) {
Example #6
0
  tempo: Ember.computed('syncBpm', 'audioBpm', function() {
    const syncBpm = this.get('syncBpm');
    const audioBpm = this.get('audioBpm');

    return (isValidNumber(syncBpm) && isValidNumber(audioBpm)) ? (syncBpm / audioBpm) : 1;
  }),
Example #7
0
  // AudioWorkerNode process callback: applies the current pitch/tempo
  // parameters to the soundtouch pipe, corrects playback drift against the
  // audio clock, extracts frames from the filter, and writes them to the
  // first output's two (stereo) channels.
  // NOTE(review): depends on closure variables from the enclosing scope
  // (filter, filterStartPosition, audioContext, startTime, endTime,
  // sampleRate, samples, channelCount, SAMPLE_DRIFT_TOLERANCE).
  function onaudioprocess({
    type,
    inputs,
    outputs,
    parameters,
    playbackTime,
    node,
  }) {

    // outputs is array of arrays of outputs
    const l = outputs[0][0];
    const r = outputs[0][1];

    // naively take first pitch and tempo values for this sample
    const pitch = parameters.pitch && parameters.pitch[0];
    const tempo = parameters.tempo && parameters.tempo[0];
    const soundtouch = filter.pipe;

    if (isValidNumber(pitch)) {
      soundtouch.pitchSemitones = pitch;
    }
    if (isValidNumber(tempo)) {
      soundtouch.tempo = tempo;
    }

    // calculate how many frames to extract based on isPlaying
    // (isPlaying holds 0/1 per output sample, so the sum is the playing count)
    const isPlaying = parameters.isPlaying || [];
    const bufferSize = l.length;

    let extractFrameCount = 0;
    for (let i = 0; i < isPlaying.length; i++) {
      extractFrameCount += isPlaying[i];
    }

    // if playing, calculate expected vs actual position
    if (extractFrameCount !== 0) {
      const actualElapsedSamples = Math.max(0, filter.position - filterStartPosition + extractFrameCount);
      const elapsedTime = Math.min(audioContext.currentTime, endTime) - startTime;
      const expectedElapsedSamples = Math.max(0, elapsedTime * sampleRate);
      // ~~ truncates toward zero to get an integer sample delta
      const sampleDelta = ~~(expectedElapsedSamples - actualElapsedSamples);

      // if we've drifed past tolerance, adjust frames to extract
      if (Math.abs(sampleDelta) >= SAMPLE_DRIFT_TOLERANCE) {

        // console.log('actualElapsedSamples', actualElapsedSamples);
        // console.log('expectedElapsedSamples', expectedElapsedSamples);

        // if we're behind where we should be, extract dummy frames to catch up
        if (sampleDelta > 0) {
          // console.log("DRIFT", sampleDelta, extractFrameCount, windowBufferSize);
          const dummySamples = new Float32Array(sampleDelta * channelCount);
          const dummyFramesExtracted = filter.extract(dummySamples, sampleDelta);

        // if we're ahead of where we should be, rewind
        } else if (sampleDelta < 0) {
          filter.position += sampleDelta;
        }
      }
    }


    const framesExtracted = extractFrameCount > 0 ? filter.extract(samples, extractFrameCount) : 0;

    // map extracted frames onto output
    // (when isPlaying[i] is 0 the output sample is forced to 0 and the
    // filter frame pointer does not advance; samples is interleaved stereo)
    let filterFrame = 0;
    for (let i = 0; i < bufferSize; i++) {
      l[i] = (samples[filterFrame * 2] * isPlaying[i]) || 0;
      r[i] = (samples[filterFrame * 2 + 1] * isPlaying[i]) || 0;
      filterFrame += isPlaying[i];
    }
  };
Example #8
0
// Creates an AudioWorkerNode that streams audio through a soundtouch filter,
// applying live pitch/tempo parameters and correcting drift against the audio
// clock. Schedules playback from startTime to endTime, reading source audio
// starting at offsetTime. Returns undefined (with a warning) when any required
// param is missing or invalid.
export function createSoundtouchNode({ audioContext, filter, startTime, offsetTime, endTime, defaultTempo, defaultPitch }) {
  const channelCount = 2;
  // NOTE(review): assumes window.BUFFER_SIZE is defined globally — confirm
  const windowBufferSize = window.BUFFER_SIZE;

  if (!(audioContext && filter
    && isValidNumber(startTime) && isValidNumber(offsetTime) && isValidNumber(endTime))) {
    // BUGFIX: previously logged only endTime; include all params for diagnosis
    Ember.Logger.warn('Must provide all params to createSoundtouchNode',
      { audioContext, filter, startTime, offsetTime, endTime });
    return;
  }

  // interleaved stereo scratch buffer shared across process callbacks
  const samples = new Float32Array(windowBufferSize * channelCount);
  const sampleRate = audioContext.sampleRate || 44100;
  const startSample = ~~(offsetTime * sampleRate); // ~~ truncates to integer

  filter.sourcePosition = startSample;

  // remember where playback started so drift can be measured against it
  const filterStartPosition = filter.position;

  // process callback: apply pitch/tempo, correct drift, write stereo output
  function onaudioprocess({
    type,
    inputs,
    outputs,
    parameters,
    playbackTime,
    node,
  }) {

    // outputs is array of arrays of outputs
    const l = outputs[0][0];
    const r = outputs[0][1];

    // naively take first pitch and tempo values for this sample
    const pitch = parameters.pitch && parameters.pitch[0];
    const tempo = parameters.tempo && parameters.tempo[0];
    const soundtouch = filter.pipe;

    if (isValidNumber(pitch)) {
      soundtouch.pitchSemitones = pitch;
    }
    if (isValidNumber(tempo)) {
      soundtouch.tempo = tempo;
    }

    // calculate how many frames to extract based on isPlaying
    // (isPlaying holds 0/1 per output sample, so the sum is the playing count)
    const isPlaying = parameters.isPlaying || [];
    const bufferSize = l.length;

    let extractFrameCount = 0;
    for (let i = 0; i < isPlaying.length; i++) {
      extractFrameCount += isPlaying[i];
    }

    // if playing, calculate expected vs actual position
    if (extractFrameCount !== 0) {
      const actualElapsedSamples = Math.max(0, filter.position - filterStartPosition + extractFrameCount);
      const elapsedTime = Math.min(audioContext.currentTime, endTime) - startTime;
      const expectedElapsedSamples = Math.max(0, elapsedTime * sampleRate);
      const sampleDelta = ~~(expectedElapsedSamples - actualElapsedSamples);

      // if we've drifted past tolerance, adjust frames to extract
      if (Math.abs(sampleDelta) >= SAMPLE_DRIFT_TOLERANCE) {

        // if we're behind where we should be, extract dummy frames to catch up
        if (sampleDelta > 0) {
          const dummySamples = new Float32Array(sampleDelta * channelCount);
          filter.extract(dummySamples, sampleDelta); // extracted frames are discarded

        // if we're ahead of where we should be, rewind
        } else if (sampleDelta < 0) {
          filter.position += sampleDelta;
        }
      }
    }

    if (extractFrameCount > 0) {
      filter.extract(samples, extractFrameCount);
    }

    // map extracted frames onto output; isPlaying gates each output sample
    // and advances the frame pointer only while playing
    let filterFrame = 0;
    for (let i = 0; i < bufferSize; i++) {
      l[i] = (samples[filterFrame * 2] * isPlaying[i]) || 0;
      r[i] = (samples[filterFrame * 2 + 1] * isPlaying[i]) || 0;
      filterFrame += isPlaying[i];
    }
  }

  // sanitize defaults: pitch falls back to 0 semitones, tempo to 1x
  defaultPitch = parseFloat(defaultPitch);
  defaultPitch = isValidNumber(defaultPitch) ? defaultPitch : 0;

  defaultTempo = parseFloat(defaultTempo);
  defaultTempo = isValidNumber(defaultTempo) ? defaultTempo : 1;

  const node = new AudioWorkerNode(audioContext, onaudioprocess, {
    numberOfInputs: 2,
    numberOfOutputs: 2,
    bufferLength: windowBufferSize,
    dspBufLength: windowBufferSize,
    parameters: [
      {
        name: 'pitch',
        defaultValue: defaultPitch,
      },
      {
        name: 'tempo',
        defaultValue: defaultTempo,
      },
      {
        name: 'isPlaying',
        defaultValue: 0,
      }
    ],
  });

  // schedule node start and end
  if (endTime > audioContext.currentTime) {
    node.isPlaying.setValueAtTime(1, startTime);
    node.isPlaying.setValueAtTime(0, endTime);
  }

  return node;
}
Example #9
0
    return this.destroyAutomationClips().then(() => {
      const store = this.get('store');
      beatCount = isValidNumber(beatCount) ? beatCount : this.get('beatCount');


      const fromTrackVolumeClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_VOLUME,
        transition: this,
      });
      const fromTrackLowBandClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_LOW_BAND,
        transition: this,
      });
      const fromTrackMidBandClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_MID_BAND,
        transition: this,
      });
      const fromTrackHighBandClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_HIGH_BAND,
        transition: this,
      });
      const fromTrackDelayWetClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_DELAY_WET,
        transition: this,
      });
      const fromTrackDelayCutoffClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_DELAY_CUTOFF,
        transition: this,
      });
      const fromTrackHighpassCutoffClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_HIGHPASS_CUTOFF,
        transition: this,
      });
      const fromTrackHighpassQClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_HIGHPASS_Q,
        transition: this,
      });
      const fromTrackLowpassCutoffClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_LOWPASS_CUTOFF,
        transition: this,
      });
      const fromTrackLowpassQClip = store.createRecord('mix/transition/from-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_LOWPASS_Q,
        transition: this,
      });

      const toTrackVolumeClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_VOLUME,
        transition: this,
      });
      const toTrackLowBandClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_LOW_BAND,
        transition: this,
      });
      const toTrackMidBandClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_MID_BAND,
        transition: this,
      });
      const toTrackHighBandClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_HIGH_BAND,
        transition: this,
      });
      const toTrackHighpassCutoffClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_HIGHPASS_CUTOFF,
        transition: this,
      });
      const toTrackHighpassQClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_HIGHPASS_Q,
        transition: this,
      });
      const toTrackLowpassCutoffClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_LOWPASS_CUTOFF,
        transition: this,
      });
      const toTrackLowpassQClip = store.createRecord('mix/transition/to-track-automation-clip', {
        controlType: CONTROL_TYPE_FILTER_LOWPASS_Q,
        transition: this,
      });

      const fromTrackAutomationClips = [
        fromTrackVolumeClip,
        fromTrackLowBandClip,
        fromTrackMidBandClip,
        fromTrackHighBandClip,
        fromTrackDelayWetClip,
        fromTrackDelayCutoffClip,
        fromTrackHighpassCutoffClip,
        fromTrackHighpassQClip,
        fromTrackLowpassCutoffClip,
        fromTrackLowpassQClip
      ];

      const toTrackAutomationClips = [
        toTrackVolumeClip,
        toTrackLowBandClip,
        toTrackMidBandClip,
        toTrackHighBandClip,
        toTrackHighpassCutoffClip,
        toTrackHighpassQClip,
        toTrackLowpassCutoffClip,
        toTrackLowpassQClip
      ];

      const clips = fromTrackAutomationClips.concat(toTrackAutomationClips);

      // TODO(TECHDEBT): save automation clips BEFORE adding items. otherwise, we get a weird bug
      // where control points are removed from relationship while saving, if only one has changed
      // - not due to orderedHasMany
      return Ember.RSVP.all(clips.invoke('save')).then(() => {
        fromTrackVolumeClip.addControlPoints(generateControlPointParams({
          startValue: startVolume,
          n: volumeControlPointCount,
          beatCount,
          direction: -1
        }));

        fromTrackLowBandClip.addControlPoints(generateControlPointParams({
          startValue: 6,
          endValue: 6,
          n: volumeControlPointCount,
          beatCount,
        }));
        fromTrackMidBandClip.addControlPoints(generateControlPointParams({
          startValue: 6,
          endValue: 6,
          n: volumeControlPointCount,
          beatCount,
        }));
        fromTrackHighBandClip.addControlPoints(generateControlPointParams({
          startValue: 6,
          endValue: 6,
          n: volumeControlPointCount,
          beatCount,
        }));

        fromTrackHighpassCutoffClip.addControlPoints(generateControlPointParams({
          startValue: 20,
          endValue: 20,
          n: volumeControlPointCount,
          beatCount,
        }));
        fromTrackHighpassQClip.addControlPoints(generateControlPointParams({
          startValue: 1,
          endValue: 1,
          n: volumeControlPointCount,
          beatCount,
        }));

        fromTrackLowpassCutoffClip.addControlPoints(generateControlPointParams({
          startValue: 22050,
          endValue: 22050,
          n: volumeControlPointCount,
          beatCount,
        }));
        fromTrackLowpassQClip.addControlPoints(generateControlPointParams({
          startValue: 1,
          endValue: 1,
          n: volumeControlPointCount,
          beatCount,
        }));

        fromTrackDelayWetClip.addControlPoints([
          {
            beat: 0,
            value: 0,
          },
          {
            beat: 3 * (beatCount / 4),
            value: 0,
          },
          {
            beat: beatCount,
            value: 0.8
          }
        ]);

        fromTrackDelayCutoffClip.addControlPoints([
          {
            beat: 0,
            value: 20,
          },
          {
            beat: 3 * (beatCount / 4),
            value: 10000,
          },
          {
            beat: beatCount,
            value: 2000,
          }
        ]);

        toTrackVolumeClip.addControlPoints(generateControlPointParams({
          startValue: startVolume,
          n: volumeControlPointCount,
          beatCount,
          direction: 1
        }));

        toTrackLowBandClip.addControlPoints(generateControlPointParams({
          startValue: 6,
          endValue: 6,
          n: volumeControlPointCount,
          beatCount,
        }));
        toTrackMidBandClip.addControlPoints(generateControlPointParams({
          startValue: 6,
          endValue: 6,
          n: volumeControlPointCount,
          beatCount,
        }));
        toTrackHighBandClip.addControlPoints(generateControlPointParams({
          startValue: 6,
          endValue: 6,
          n: volumeControlPointCount,
          beatCount,
        }));

        toTrackHighpassCutoffClip.addControlPoints(generateControlPointParams({
          startValue: 20,
          endValue: 20,
          n: volumeControlPointCount,
          beatCount,
        }));
        toTrackHighpassQClip.addControlPoints(generateControlPointParams({
          startValue: 1,
          endValue: 1,
          n: volumeControlPointCount,
          beatCount,
        }));

        toTrackLowpassCutoffClip.addControlPoints(generateControlPointParams({
          startValue: 22050,
          endValue: 22050,
          n: volumeControlPointCount,
          beatCount,
        }));
        toTrackLowpassQClip.addControlPoints(generateControlPointParams({
          startValue: 1,
          endValue: 1,
          n: volumeControlPointCount,
          beatCount,
        }));

        if (isFromTrackDelayBypassed) {
          this.set('fromTrackClip.delayBypass', true);
        }

        this.get('fromTrackAutomationClips').addObjects(fromTrackAutomationClips);
        this.get('toTrackAutomationClips').addObjects(toTrackAutomationClips);

        this.set('beatCount', beatCount);
        return this;
      });
    });
Example #10
0
  TrackMarkerMixin,
  RequireAttributes('time'), {

  time: null,

  // setting beat proxies to setting time
  beat: Ember.computed('beatGrid.beatScale', 'time', {
    get(key) {
      let beatGrid = this.get('beatGrid');
      return beatGrid && beatGrid.timeToBeat(this.get('time'));
    },

    set(key, beat) {
      let beatGrid = this.get('beatGrid');

      Ember.assert('Can only set marker beat with numeric beat', isValidNumber(beat));
      Ember.assert('Can only set marker beat with valid beatGrid', Ember.isPresent(beatGrid));

      this.set('time', beatGrid.beatToTime(beat));
      return beat;
    }
  }),

  // setting bar proxies to setting time
  bar: Ember.computed('beatGrid.barScale', 'time', {
    get(key) {
      let beatGrid = this.get('beatGrid');
      return beatGrid && beatGrid.timeToBar(this.get('time'));
    },

    set(key, bar) {
Example #11
0
      return this._super.apply(this, arguments);
    }
  },

  // gets order of given index
  _getIndexOrder(index) {
    let item = this.objectAt(index);

    return item && item.get('order');
  },

  // gets order given optional prevOrder and nextOrder
  _getOrder(prevOrder, nextOrder) {
    let order;

    if (isValidNumber(prevOrder) && isValidNumber(nextOrder)) {
      order = prevOrder + ((nextOrder - prevOrder) / 2.0);
    } else if (isValidNumber(prevOrder)) {
      order = prevOrder * 2.0;
    } else if (isValidNumber(nextOrder)) {
      order = nextOrder / 2.0;
    } else {
      order = 1.000;
    }

    return order;
  },

  // TODO: this will create new array... somehow maintain old?
  // live views of _sortedContent partitioned by the isRemoved flag
  // (removedContent filters on truthy isRemoved — no third arg)
  content: Ember.computed.filterBy('_sortedContent', 'isRemoved', false),
  removedContent: Ember.computed.filterBy('_sortedContent', 'isRemoved')