From 4c90ba4d6464916edb4a98f88ddf461e02553de6 Mon Sep 17 00:00:00 2001
From: Lukasz Foniok
Date: Mon, 9 Nov 2015 16:33:50 +0100
Subject: [PATCH] [Media] startRecord, stopRecord implementation

Change-Id: I552481d5e5cf91c8af9babadb30684f12605d3ec
Signed-off-by: Lukasz Foniok
---
 src/media/cordova_media_api.js | 239 +++++++++++++++++++++++++++++----
 1 file changed, 216 insertions(+), 23 deletions(-)

diff --git a/src/media/cordova_media_api.js b/src/media/cordova_media_api.js
index 0f42cc4..199e17a 100755
--- a/src/media/cordova_media_api.js
+++ b/src/media/cordova_media_api.js
@@ -20,18 +20,205 @@ var plugin_name = 'cordova-plugin-media.tizen.Media';
 cordova.define(plugin_name, function(require, exports, module) {
 // TODO: remove -> end
 
-    var audioObjects = {};
+    var audioObjects = {};
+    var recorder = null;
+
+    function Recorder(_filename) {
+        var recorder = null;
+        var recording = false;
+        var recordingLength = 0;
+        var volume = null;
+        var audioInput = null;
+        var sampleRate = null;
+        var filename = _filename;
+
+        // creates the audio context
+        var audioContext = window.AudioContext || window.webkitAudioContext;
+        var context = new audioContext();
+
+        var audioBlob = null;
+
+        this.rec = function(){
+
+            if (!navigator.getUserMedia)
+                navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
+                    navigator.mozGetUserMedia || navigator.msGetUserMedia;
+
+            if (navigator.getUserMedia){
+                navigator.getUserMedia({audio:true}, onGetUserMedia, function(e) {
+                    console.log('Error capturing audio.');
+                });
+            } else {
+                console.log('getUserMedia not supported in this browser.');
+            }
+        }
+
+        this.stop = function (){
+            recording = false;
+            audioBlob.stop();
+            recorder.disconnect();
+
+            // flat the left and right channels down
+            var leftBuffer = mergeBuffers(leftchannel, recordingLength);
+            var rightBuffer = mergeBuffers(rightchannel, recordingLength);
+            // interleave both channels together
+            var interleaved = interleave(leftBuffer, rightBuffer);
+
+            // create the buffer and view to create the .WAV file
+            var buffer = new ArrayBuffer(44 + interleaved.length * 2);
+            var view = new DataView(buffer);
+
+            // write the WAV container
+            // RIFF chunk descriptor
+            writeUTFBytes(view, 0, 'RIFF');
+            view.setUint32(4, 44 + interleaved.length * 2, true);
+            writeUTFBytes(view, 8, 'WAVE');
+            // FMT sub-chunk
+            writeUTFBytes(view, 12, 'fmt ');
+            view.setUint32(16, 16, true);
+            view.setUint16(20, 1, true);
+            // stereo (2 channels)
+            view.setUint16(22, 2, true);
+            view.setUint32(24, sampleRate, true);
+            view.setUint32(28, sampleRate * 4, true);
+            view.setUint16(32, 4, true);
+            view.setUint16(34, 16, true);
+            // data sub-chunk
+            writeUTFBytes(view, 36, 'data');
+            view.setUint32(40, interleaved.length * 2, true);
+
+            // write the PCM samples
+            var lng = interleaved.length;
+            var index = 44;
+            var volume = 1;
+            for (var i = 0; i < lng; i++){
+                view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
+                index += 2;
+            }
+
+            handleReadBlob(buffer);
+        }
+
+        function onGetUserMedia(stream){
+            recording = true;
+            var leftchannel = [];
+            var rightchannel = [];
+            recordingLength = 0;
+
+            audioBlob = stream;
+
+            // retrieve the current sample rate to be used for WAV packaging
+            sampleRate = context.sampleRate;
+
+            // creates a gain node
+            volume = context.createGain();
+
+            // creates an audio node from the microphone incoming stream
+            audioInput = context.createMediaStreamSource(stream);
+
+            // connect the stream to the gain node
+            audioInput.connect(volume);
+
+            /* From the spec: This value controls how frequently the audioprocess event is
+               dispatched and how many sample-frames need to be processed each call.
+               Lower values for buffer size will result in a lower (better) latency.
+               Higher values will be necessary to avoid audio breakup and glitches */
+            var bufferSize = 2048;
+            recorder = context.createJavaScriptNode(bufferSize, 2, 2);
+
+            recorder.onaudioprocess = function(sample){
+                if(!recording) {
+                    return;
+                }
+                var left = sample.inputBuffer.getChannelData(0);
+                var right = sample.inputBuffer.getChannelData(1);
+                // clone the samples
+                leftchannel.push(new Float32Array(left));
+                rightchannel.push(new Float32Array(right));
+                recordingLength += bufferSize;
+            }
+
+            // connect the recorder
+            volume.connect (recorder);
+            recorder.connect (context.destination);
+        }
+
+        function handleReadBlob(buffer) {
+            var array = new Uint8Array(buffer);
+            var arr = [];
+            for (var i=0; i
-            console.log("media::onStalled() - MEDIA_ERROR -> " + MediaError.MEDIA_ERR_ABORTED);
+            console.log('media::onStalled() - MEDIA_ERROR -> ' + MediaError.MEDIA_ERR_ABORTED);
-                var err = new MediaError(MediaError.MEDIA_ERR_ABORTED, "Stalled");
+                var err = new MediaError(MediaError.MEDIA_ERR_ABORTED, 'Stalled');
                 Media.onStatus(id, Media.MEDIA_ERROR, err);
             }, 2000);
         };
 
         audioObjects[id].onEndedCB = function () {
-            console.log("media::onEndedCB() - MEDIA_STATE -> MEDIA_STOPPED");
+            console.log('media::onEndedCB() - MEDIA_STATE -> MEDIA_STOPPED');
             Media.onStatus(id, Media.MEDIA_STATE, Media.MEDIA_STOPPED);
         };
 
         audioObjects[id].onErrorCB = function (event) {
-            console.log("media::onErrorCB() - MEDIA_ERROR -> " + event.srcElement.error);
+            console.log('media::onErrorCB() - MEDIA_ERROR -> ' + event.srcElement.error);
             Media.onStatus(id, Media.MEDIA_ERROR, event.srcElement.error);
         };
 
         audioObjects[id].onPlayCB = function () {
-            console.log("media::onPlayCB() - MEDIA_STATE -> MEDIA_STARTING");
+            console.log('media::onPlayCB() - MEDIA_STATE -> MEDIA_STARTING');
             Media.onStatus(id, Media.MEDIA_STATE, Media.MEDIA_STARTING);
         };
 
         audioObjects[id].onPlayingCB = function () {
-            console.log("media::onPlayingCB() - MEDIA_STATE -> MEDIA_RUNNING");
+            console.log('media::onPlayingCB() - MEDIA_STATE -> MEDIA_RUNNING');
             Media.onStatus(id, Media.MEDIA_STATE, Media.MEDIA_RUNNING);
         };
 
         audioObjects[id].onDurationChangeCB = function () {
-            console.log("media::onDurationChangeCB() - MEDIA_DURATION -> " + audioObjects[id].duration);
+            console.log('media::onDurationChangeCB() - MEDIA_DURATION -> ' + audioObjects[id].duration);
             Media.onStatus(id, Media.MEDIA_DURATION, audioObjects[id].duration);
         };
 
         audioObjects[id].onTimeUpdateCB = function () {
-            console.log("media::onTimeUpdateCB() - MEDIA_POSITION -> " + audioObjects[id].currentTime);
+            console.log('media::onTimeUpdateCB() - MEDIA_POSITION -> ' + audioObjects[id].currentTime);
             Media.onStatus(id, Media.MEDIA_POSITION, audioObjects[id].currentTime);
         };
 
         audioObjects[id].onCanPlayCB = function () {
-            console.log("media::onCanPlayCB()");
+            console.log('media::onCanPlayCB()');
 
             window.clearTimeout(audioObjects[id].timer);
@@ -95,7 +282,7 @@ cordova.define(plugin_name, function(require, exports, module) {
     startPlayingAudio: function(successCallback, errorCallback, args) {
         var id = args[0], src = args[1];
 
-        console.log("media::startPlayingAudio() - id =" + id + ", src =" + src);
+        console.log('media::startPlayingAudio() - id =' + id + ', src =' + src);
 
         audioObjects[id].addEventListener('canplay', audioObjects[id].onCanPlayCB);
         audioObjects[id].addEventListener('ended', audioObjects[id].onEndedCB);
@@ -118,7 +305,7 @@ cordova.define(plugin_name, function(require, exports, module) {
         if (audioObjects[id].currentTime !== 0)
             audioObjects[id].currentTime = 0;
 
-        console.log("media::stopPlayingAudio() - MEDIA_STATE -> MEDIA_STOPPED");
+        console.log('media::stopPlayingAudio() - MEDIA_STATE -> MEDIA_STOPPED');
 
         audioObjects[id].removeEventListener('canplay', audioObjects[id].onCanPlayCB);
         audioObjects[id].removeEventListener('ended', audioObjects[id].onEndedCB);
@@ -134,15 +321,15 @@ cordova.define(plugin_name, function(require, exports, module) {
 
     seekToAudio: function(successCallback, errorCallback, args) {
         var id = args[0], milliseconds = args[1];
-        console.log("media::seekToAudio()");
+        console.log('media::seekToAudio()');
 
         audioObjects[id].currentTime = milliseconds;
-        successCallback( audioObjects[id].currentTime);
+        successCallback(audioObjects[id].currentTime);
     },
 
     pausePlayingAudio: function(successCallback, errorCallback, args) {
         var id = args[0];
-        console.log("media::pausePlayingAudio() - MEDIA_STATE -> MEDIA_PAUSED");
+        console.log('media::pausePlayingAudio() - MEDIA_STATE -> MEDIA_PAUSED');
 
         audioObjects[id].pause();
@@ -150,26 +337,32 @@ cordova.define(plugin_name, function(require, exports, module) {
     },
 
     getCurrentPositionAudio: function(successCallback, errorCallback, args) {
         var id = args[0];
-        console.log("media::getCurrentPositionAudio()");
+        console.log('media::getCurrentPositionAudio()');
 
         successCallback(audioObjects[id].currentTime);
     },
 
-    startRecordingAudio: function(successCallback, errorCallback, args) {},
-    stopRecordingAudio: function(successCallback, errorCallback, args) {},
+    startRecordingAudio: function(successCallback, errorCallback, args) {
+        console.log('media::startRecordingAudio()');
+        recorder.rec();
+    },
+    stopRecordingAudio: function(successCallback, errorCallback, args) {
+        console.log('media::stopRecordingAudio()');
+        recorder.stop();
+    },
 
     release: function(successCallback, errorCallback, args) {
         exports.stopPlayingAudio(successCallback, errorCallback, args);
 
         var id = args[0];
         delete audioObjects[id];
-        console.log("media::release()");
+        console.log('media::release()');
     },
 
     setVolume: function(successCallback, errorCallback, args) {
         var id = args[0], volume = args[1];
-        console.log("media::setVolume()");
+        console.log('media::setVolume()');
 
         audioObjects[id].volume = volume;
     }
 };
 
-require("cordova/exec/proxy").add("Media", exports);
+require('cordova/exec/proxy').add('Media', exports);
 
 console.log('Loaded cordova.media API');
-- 
2.34.1
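
For context, the startRecordingAudio/stopRecordingAudio proxies added above are reached through the standard cordova-plugin-media JavaScript API (Media.startRecord / Media.stopRecord), which routes to the platform proxy registered via cordova/exec/proxy. A minimal usage sketch from application code follows; the 'recording.wav' file name and the ten-second timeout are illustrative and not part of this patch.

// Sketch only: exercises the new proxy methods through the Media API.
var media = new Media('recording.wav',
    function () { console.log('recording finished'); },
    function (err) { console.log('recording error: ' + err.code); });

// Dispatched to startRecordingAudio in the proxy, which calls recorder.rec()
// and starts capturing microphone input via getUserMedia.
media.startRecord();

// Stop after ten seconds; dispatched to stopRecordingAudio, which calls
// recorder.stop() and packages the captured PCM samples into a WAV buffer.
setTimeout(function () {
    media.stopRecord();
    media.release();
}, 10000);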