_stop: function() {
   if (this.state.recording) {
     AudioRecorder.stopRecording();
     this.setState({stoppedRecording: true, recording: false});
   } else if (this.state.playing) {
     AudioRecorder.stopPlaying();
     this.setState({playing: false, stoppedPlaying: true});
   }
 },
  async record() {
    const {
      recording,
      hasPermission,
      stopped,
      audioPath
    } = this.state;

    if (recording) {
      ToastAndroid.show('Already recording!', ToastAndroid.SHORT);
      return;
    }

    if (!hasPermission) {
      ToastAndroid.show('Can\'t record, no permission granted!', ToastAndroid.SHORT);
      return;
    }

    if (stopped) {
      this.prepareRecordingPath(audioPath);
    }

    this.setState({ recording: true, paused: false, stopped: false });

    try {
      await AudioRecorder.startRecording();
    } catch (error) {
      console.log(error);
    }
  }
Example #3
0
 _pause: function() {
   if (this.state.recording)
     AudioRecorder.pauseRecording();
   else if (this.state.playing) {
     // AudioRecorder.pausePlaying();
   }
 },
 _play: function() {
    if (this.state.recording) {
      this._stop();
      this.setState({recording: false});
    }
    AudioRecorder.playRecording();
    this.setState({playing: true});
  },
Example #5
0
 // Redux thunk: (re)starts the recorder. Stops any in-progress recording,
 // prepares the recorder at `tmpFile` (captured from the enclosing scope),
 // starts recording, then announces START_RECORDING.
 return function (dispatch, getState) {
     var state = getState();
     // Only one recording may be active at a time.
     if (state.recorder.isRecording) {
         dispatch(stopRecording());
     }
     // NOTE(review): prepare/start are fire-and-forget here — any rejection
     // from the native module is unhandled; confirm that is acceptable.
     react_native_audio_1.AudioRecorder.prepareRecordingAtPath(tmpFile, {
         SampleRate: 22050,
         Channels: 1,
         AudioQuality: 'Low',
         AudioEncoding: 'aac',
         AudioEncodingBitRate: 32000
     });
     react_native_audio_1.AudioRecorder.startRecording();
     dispatch({
         type: 'START_RECORDING'
     });
 };
 prepareRecordingPath(audioPath){
   AudioRecorder.prepareRecordingAtPath(audioPath, {
     SampleRate: 22050,
     Channels: 1,
     AudioQuality: "Low",
     AudioEncoding: "aac",
     AudioEncodingBitRate: 32000
   });
 }
  async pause() {
    const { recording } = this.state;
    if (!recording) {
      ToastAndroid.show('Can\'t pause, not recording!', ToastAndroid.SHORT);
      return;
    }

    try {
      await AudioRecorder.pauseRecording();
      this.setState({ paused: true });
    } catch (error) {
      console.log(error);
    }
  }
  async resume() {
    const { paused } = this.state;
    if (!paused) {
      ToastAndroid.show('Can\'t resume, not paused!', ToastAndroid.SHORT);
      return;
    }

    try {
      await AudioRecorder.resumeRecording();
      this.setState({ paused: false });
    } catch (error) {
      console.log(error);
    }
  }
  async stop() {
    const { recording } = this.state;
    if (!recording) {
      ToastAndroid.show('Can\'t stop, not recording!', ToastAndroid.SHORT);
      return;
    }

    this.setState({ stopped: true, recording: false, paused: false });

    try {
      const filePath = await AudioRecorder.stopRecording();
      if (Platform.OS === 'android') {
        this.finishRecording(true, filePath);
      }
    } catch (error) {
      console.log(error);
    }
  }
    async _pause() {
      if (!this.state.recording) {
        console.warn('Can\'t pause, not recording!');
        return;
      }

      this.setState({stoppedRecording: true, recording: false});

      try {
        const filePath = await AudioRecorder.pauseRecording();

        // Pause is currently equivalent to stop on Android.
        if (Platform.OS === 'android') {
          this._finishRecording(true, filePath);
        }
      } catch (error) {
        console.error(error);
      }
    }
    async _stop() {
      if (!this.state.recording) {
        console.warn('Can\'t stop, not recording!');
        return;
      }

      this.setState({stoppedRecording: true, recording: false});

      try {
        const filePath = await AudioRecorder.stopRecording();

        if (Platform.OS === 'android') {
          this._finishRecording(true, filePath);
        }
        return filePath;
      } catch (error) {
        console.error(error);
      }
    }
    async _record() {
      if (this.state.recording) {
        console.warn('Already recording!');
        return;
      }

      if (!this.state.hasPermission) {
        console.warn('Can\'t record, no permission granted!');
        return;
      }

      if(this.state.stoppedRecording){
        this.prepareRecordingPath(this.state.audioPath);
      }

      this.setState({recording: true});

      try {
        const filePath = await AudioRecorder.startRecording();
      } catch (error) {
        console.error(error);
      }
    }
Example #13
0
  componentDidMount() {
    AudioRecorder.requestAuthorization().then((isAuthorised) => {
      this.setState({ hasPermission: isAuthorised });

      if (!isAuthorised) {
        return;
      }

      const { audioPath } = this.state;
      this.prepareRecordingPath(audioPath);

      AudioRecorder.onProgress = (data) => {
        this.setState({ currentTime: Math.floor(data.currentTime) });
      };

      AudioRecorder.onFinished = (data) => {
        // Android callback comes in the form of a promise instead.
        if (Platform.OS === 'ios') {
          this.finishRecording(data.status === 'OK', data.audioFileURL, data.audioFileSize);
        }
      };
    });
  }
 _record: function() {
   AudioRecorder.startRecording();
   this.setState({recording: true, playing: false});
 },
var {AudioRecorder, AudioPlayer} = require('react-native-audio');

var AudioExample = React.createClass({

  getInitialState() {
    return {
      currentTime: 0.0,
      recording: false,
      stoppedRecording: false,
      stoppedPlaying: false,
      playing: false,
      finishedRecording: ""
    }
  },
  // Prepare the recorder and register the native progress/finished callbacks.
  componentDidMount() {
    // NOTE(review): 'test' looks like a placeholder path — confirm a real
    // writable file path is intended here.
    AudioRecorder.prepareRecordingAtPath('test')
    AudioRecorder.onProgress = (data) => { 
      console.log(data);
      this.setState({currentTime: Math.floor(data.currentTime)});
    };
    AudioRecorder.onFinished = (data) => { 
      console.log("finished"+data.finished);
      // NOTE(review): this writes `finished`, but getInitialState declares
      // `finishedRecording` — likely a state-key mismatch; verify consumers.
      this.setState({finished: data.finished});
    };
  },

  _renderButton: function(title, onPress, active) {
    var style = (active) ? styles.activeButtonText : styles.buttonText

    return (<TouchableHighlight style={styles.button} onPress={onPress}>
      <Text style={style}>
Example #16
0
// Action creator: start playback of the last recording (side-effecting —
// it triggers the native player before returning the action).
function startPlaying() {
    react_native_audio_1.AudioRecorder.playRecording();
    const action = { type: 'START_PLAYING' };
    return action;
}
Example #17
0
var {AudioRecorder, AudioPlayer} = require('react-native-audio');

var AudioExample = React.createClass({

  getInitialState() {
    return {
      currentTime: 0.0,
      recording: false,
      stoppedRecording: false,
      stoppedPlaying: false,
      playing: false,
      finished: false
    }
  },
  componentDidMount() {
    AudioRecorder.prepareRecordingAtPath('/test.caf')
    AudioRecorder.onProgress = (data) => {
      this.setState({currentTime: Math.floor(data.currentTime)});
    };
    AudioRecorder.onFinished = (data) => {
      this.setState({finished: data.finished});
      console.log(`Finished recording: ${data.finished}`)
    };
  },

  _renderButton: function(title, onPress, active) {
    var style = (active) ? styles.activeButtonText : styles.buttonText

    return (<TouchableHighlight style={styles.button} onPress={onPress}>
      <Text style={style}>
        {title}
Example #18
0
// Action creator: pause audio playback (side-effecting — it calls the
// native module before returning the action).
function pausePlaying() {
    react_native_audio_1.AudioRecorder.pausePlaying();
    const action = { type: 'PAUSE_PLAYING' };
    return action;
}
Example #19
0
// Action creator: pause the active recording (side-effecting — it calls
// the native module before returning the action).
function pauseRecording() {
    react_native_audio_1.AudioRecorder.pauseRecording();
    const action = { type: 'PAUSE_RECORDING' };
    return action;
}
Example #20
0
// Action creator: stop audio playback (side-effecting — it calls the
// native module before returning the action).
function stopPlaying() {
    react_native_audio_1.AudioRecorder.stopPlaying();
    const action = { type: 'STOP_PLAYING' };
    return action;
}
Example #21
0
// Action creator: stop the active recording (side-effecting — it calls
// the native module before returning the action).
function stopRecording() {
    react_native_audio_1.AudioRecorder.stopRecording();
    const action = { type: 'STOP_RECORDING' };
    return action;
}