Web Audio API, Recorder.js: Recording from sourceBuffer without delay - html

I am trying to create a Web Audio API based application. So far I have multiple buffer nodes, connected to the destination.
What I am trying to achieve is to be able to record the resulting output when the user (for example) presses a button. I tried using recorder.js, but as far as I understand it, you need to play the audio graph while recording.
The following code depicts the issue:
<html><body><audio controls autoplay></audio>
<script type="text/javascript" src="recorder.js"> </script>
<input onclick="startRecording()" type="button" value="start recording" />
<input onclick="stopRecording()" type="button" value="stop recording and play" />
<script>
// Use the standard AudioContext where available; fall back to the
// prefixed WebKit constructor for older Chrome/Safari builds. The
// original used `webkitAudioContext` only, which fails everywhere else.
var AudioContextCtor = window.AudioContext || window.webkitAudioContext;
var context = new AudioContextCtor();
var request = new XMLHttpRequest();
var onFail = function (e) {
  console.log('Rejected!', e);
};
// Runs once the user has granted microphone access: fetch voice.wav,
// decode it, tap it with a Recorder node and start playback + capture.
var onSuccess = function (s) {
  request.open('GET', 'voice.wav', true);
  request.responseType = 'arraybuffer';
  request.onload = function () {
    var undecodedAudio = request.response;
    context.decodeAudioData(undecodedAudio, function (buffer) {
      // The decoded contents of voice.wav are now an AudioBuffer.
      var sourceBuffer = context.createBufferSource();
      sourceBuffer.buffer = buffer;
      sourceBuffer.connect(context.destination);
      // Recorder captures whatever flows through the node it wraps, so
      // the source has to actually play for samples to be recorded.
      recorder = new Recorder(sourceBuffer);
      recorder.record();
      sourceBuffer.start(context.currentTime);
    });
  };
  request.send();
};
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var recorder;
var audio = document.querySelector('audio');
function startRecording() {
  if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true}, onSuccess, onFail);
  } else {
    console.log('navigator.getUserMedia not present');
  }
}
function stopRecording() {
  recorder.stop();
  // Export the captured samples as a WAV blob and feed it to <audio>.
  recorder.exportWAV(function (s) {
    audio.src = window.URL.createObjectURL(s);
  });
}
</script></body></html>
What I want is that when the user presses record, the file is converted to the resultant audio without actual playback.

Related

send audio using html5 websockets

My requirement is to send audio chunks captured from microphone to the server using html5 web socket connection.
Establish a web socket connection with the server. While the user is speaking, capture small chunks of audio and send them to the server. This should continue until the user stops speaking for 10 seconds, then close the web socket connection. Open the web socket connection again when the user starts speaking.
I know how to open a connection and send the audio for the first time.
I have two buttons. onClick of one button(startRecording) i will open web socket connection and i am recording the audio.
onClick of second button(stopRecording) i am sending the audio to the server.
But my problem is how to do this without buttons: it has to record the audio until the user pauses, and then send it to the server.
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var recorder;
var audio = document.querySelector('audio');
var websocket;
var text;
// Error callback for getUserMedia. The original passed `onFail` to
// getUserMedia without ever defining it, which threw a ReferenceError
// as soon as microphone access was denied.
var onFail = function (e) {
  console.log('Rejected!', e);
};
// Success callback: wrap the microphone stream in a Recorder node and
// start capturing immediately.
var onSuccess = function (s) {
  var context = new AudioContext();
  var mediaStreamSource = context.createMediaStreamSource(s);
  recorder = new Recorder(mediaStreamSource);
  recorder.record();
  // audio loopback
  // mediaStreamSource.connect(context.destination);
};
function startRecording() {
  var wsURI = "url";
  websocket = new WebSocket(wsURI);
  websocket.onopen = function (evt) { onOpen(evt); };
  websocket.onclose = function (evt) { onClose(); };
  websocket.onmessage = function (evt) { onMessage(evt); };
  websocket.onerror = function (evt) { onError(evt); };
  // Tell the server a new audio stream is starting.
  function onOpen(evt) {
    var message = {
      'action': 'start',
      //'content-type': 'audio/l16;rate=22050'
      'content-type': 'audio/wav;rate=22050'
    };
    websocket.send(JSON.stringify(message));
  }
  function onMessage(evt) {
    console.log(evt.data);
    //console.log(JSON.parse(evt.data))
  }
  // The original wired `onError` as the websocket error handler but
  // never defined it, so any socket error raised a ReferenceError.
  function onError(evt) {
    console.log('WebSocket error:', evt);
  }
  if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true}, onSuccess, onFail);
  } else {
    console.log('navigator.getUserMedia not present');
  }
}
function onClose() {
  websocket.close();
}
function stopRecording() {
  recorder.stop();
  // Export the captured audio as a WAV blob and push it to the server.
  recorder.exportWAV(function (s) {
    //audio.src = window.URL.createObjectURL(s);
    websocket.send(s);
    //websocket.onclose();
  });
}

How to save a local streaming video of firefox

I am trying to share my screen. I have used the HTML video tag to stream video, and then I am using the play option of this video. Now I want to save the video on my computer. How can I achieve this? For reference, the code is below:
var video = document.getElementById("video");
// Test browser support
window.navigator = window.navigator || {};
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
null;
// Hold a reference to the captured stream. The original assigned an
// implicit global (`videoStream = stream`), which breaks in strict mode.
var videoStream;
if (navigator.getUserMedia === null) {
// getUserMedia is unavailable: disable the recorder controls.
// document.getElementById('gum-unsupported').classList.remove('hidden');
document.getElementById('videorecorderplay-button-recorder').setAttribute('disabled', 'disabled');
document.getElementById('videorecorderstop-button-recorder').setAttribute('disabled', 'disabled');
} else {
// Opera <= 12.16 accepts the direct stream.
// More on this here: http://dev.opera.com/articles/view/playing-with-html5-video-and-getusermedia-support/
var createSrc = window.URL ? window.URL.createObjectURL : function (stream) { return stream; };
// Opera <= 12.16 support video only.
var audioContext = window.AudioContext ||
window.webkitAudioContext ||
null;
if (audioContext === null) {
document.getElementById('gum-partially-supported').classList.remove('hidden');
}
var constraints = {
video: {
mandatory: {
minWidth: 1280,
minHeight: 720,
minFrameRate: 30
},
optional: [
{ minFrameRate: 60 }
],
// NOTE(review): mediaSource "screen" looks Firefox-specific — confirm
// against the target browsers.
mediaSource: "screen"
}
};
// Capture the user's screen. Use the normalized navigator.getUserMedia
// set up above rather than calling navigator.mozGetUserMedia directly,
// so the feature detection actually applies across browsers.
navigator.getUserMedia(constraints, function (stream) {
console.log("Received local stream");
videoStream = stream;
video.src = createSrc(stream);
video.play();
},
function (error) {
console.log("Video capture error: ", error.code);
});
}
Once I call video.play(), my screen sharing starts. Now I want to save it. How can I do this?

Audio and video recorder in html5 using getusermedia()

I am creating an application to record audio and video, but it works only in Google Chrome or Canary. I want it to work in Internet Explorer and Mozilla Firefox. This is my code.
Can we do it without using getUserMedia? If yes, please tell me.
pls refer this link
http://davidwalsh.name/demo/camera.php
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams . It allows web apps to create a file from a live audio/video session.
MediaStreamRecorder is currently unimplemented; you should wait for it to be implemented in all browsers.
<video autoplay></video>
<script language="javascript" type="text/javascript">
function onVideoFail(e) {
  console.log('webcam fail!', e);
}
function hasGetUserMedia() {
  // Note: Opera is unprefixed.
  return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia || navigator.msGetUserMedia);
}
if (hasGetUserMedia()) {
  // Good to go!
} else {
  alert('getUserMedia() is not supported in your browser');
}
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia;
var video = document.querySelector('video');
var streamRecorder;       // MediaRecorder instance while a capture is running
var recordedChunks = [];  // data buffers delivered by the recorder
var webcamstream;
if (navigator.getUserMedia) {
  navigator.getUserMedia({audio: true, video: true}, function (stream) {
    video.src = window.URL.createObjectURL(stream);
    webcamstream = stream;
  }, onVideoFail);
} else {
  alert ('failed');
}
function startRecording() {
  // MediaStream has no record() method; the standard way to capture a
  // getUserMedia stream is the MediaRecorder API.
  recordedChunks = [];
  streamRecorder = new MediaRecorder(webcamstream);
  streamRecorder.ondataavailable = function (evt) {
    if (evt.data && evt.data.size > 0) {
      recordedChunks.push(evt.data);
    }
  };
  streamRecorder.onstop = function () {
    postVideoToServer(new Blob(recordedChunks));
  };
  streamRecorder.start();
  setTimeout(stopRecording, 10000); // auto-stop after 10 s
}
function stopRecording() {
  // Stopping fires onstop above, which uploads the recorded blob.
  streamRecorder.stop();
}
function postVideoToServer(videoblob) {
  var data = {};
  data.video = videoblob;
  data.metadata = 'test metadata';
  data.action = "upload_video";
  // NOTE(review): jQuery.post cannot serialize a Blob; a real upload
  // should use FormData with $.ajax({processData: false, ...}).
  jQuery.post("http://www.kongaraju.in/uploadvideo.php", data, onUploadSuccess);
}
function onUploadSuccess() {
  alert ('video uploaded');
}
</script>
<div id="webcamcontrols">
<button class="recordbutton" onclick="startRecording();">RECORD</button>
</div>
http://www.w3.org/TR/mediastream-recording/

HTML5 solution to upload a webcam/camera video stream to server

Using getUserMedia I can capture video stream from client's webcam/camera. And using video tag I can show it on client's browser. Code:
<video autoplay></video>
<script type="text/javascript">
// Normalize vendor-prefixed APIs onto their standard names.
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var video = $('video')[0];
// Invoked when the user denies camera/microphone access.
var failed = function (e) {
  console.log('Denied!', e);
};
if (navigator.getUserMedia) {
  // Request both audio and video, and pipe the live stream into the
  // <video> element through an object URL.
  var onStream = function (stream) {
    video.src = window.URL.createObjectURL(stream);
  };
  navigator.getUserMedia({video: true, audio: true}, onStream, failed);
} else {
  console.log('Not supported!');
}
</script>
Now is it possible to send this video stream, either as a realtime feed or after user has done recording and decided to upload, to a server?
I found few examples of:
sending binary images to server over websocket
Periodically capture frame of streaming video and send that as image
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams . It allows web apps to create a file from a live audio/video session.
<video autoplay></video>
<script language="javascript" type="text/javascript">
function onVideoFail(e) {
  console.log('webcam fail!', e);
}
function hasGetUserMedia() {
  // Note: Opera is unprefixed.
  return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia || navigator.msGetUserMedia);
}
if (hasGetUserMedia()) {
  // Good to go!
} else {
  alert('getUserMedia() is not supported in your browser');
}
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia;
var video = document.querySelector('video');
var streamRecorder;       // MediaRecorder instance while a capture is running
var recordedChunks = [];  // data buffers delivered by the recorder
var webcamstream;
if (navigator.getUserMedia) {
  navigator.getUserMedia({audio: true, video: true}, function (stream) {
    video.src = window.URL.createObjectURL(stream);
    webcamstream = stream;
  }, onVideoFail);
} else {
  alert ('failed');
}
function startRecording() {
  // MediaStream has no record() method; the standard way to capture a
  // getUserMedia stream is the MediaRecorder API.
  recordedChunks = [];
  streamRecorder = new MediaRecorder(webcamstream);
  streamRecorder.ondataavailable = function (evt) {
    if (evt.data && evt.data.size > 0) {
      recordedChunks.push(evt.data);
    }
  };
  streamRecorder.onstop = function () {
    postVideoToServer(new Blob(recordedChunks));
  };
  streamRecorder.start();
  setTimeout(stopRecording, 10000); // auto-stop after 10 s
}
function stopRecording() {
  // Stopping fires onstop above, which uploads the recorded blob.
  streamRecorder.stop();
}
function postVideoToServer(videoblob) {
  var data = {};
  data.video = videoblob;
  data.metadata = 'test metadata';
  data.action = "upload_video";
  // NOTE(review): jQuery.post cannot serialize a Blob; a real upload
  // should use FormData with $.ajax({processData: false, ...}).
  jQuery.post("http://www.kongraju.in/uploadvideo.php", data, onUploadSuccess);
}
function onUploadSuccess() {
  alert ('video uploaded');
}
</script>
<div id="webcamcontrols">
<button class="recordbutton" onclick="startRecording();">RECORD</button>
</div>
Spec:
http://www.w3.org/TR/mediastream-recording/
you can send recorded file to server.
Take a look at this article: http://www.smartjava.org/content/face-detection-using-html5-javascript-webrtc-websockets-jetty-and-javacvopencv
It shows a use of Webrtc:
These APIs should enable building applications that can be run inside a browser, requiring no extra downloads or plugins, that allow communication between parties using audio, video and supplementary real-time communication, without having to use intervening servers (unless needed for firewall traversal, or for providing intermediary services).

Fileupload using Filereader in chrome

I have to upload a file from the local memory of application (HTML5 File api). Onselect, the user should be able to upload directly without any question. The idea is to manage download/upload seamless to the user. Here is the code :
$("body").on("click", ".upload-file", function (e) {
  var fileToUpload = $('input:radio[name=optionsRadios]:checked').val();
  var formData = new FormData();
  $('input:radio[name=optionsRadios]:checked').parent().parent().parent().remove();
  window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem;
  window.requestFileSystem(window.TEMPORARY, 50 * 1024 * 1024, initFS, errorHandler);

  // Walk the sandboxed filesystem root looking for the selected file.
  function initFS(fs) {
    fs.root.getDirectory('/', {}, function (dirEntry) {
      var dirReader = dirEntry.createReader();
      dirReader.readEntries(function (entries) {
        for (var key = 0; key < entries.length; key++) {
          var entry = entries[key];
          if (entry.isFile && entry.name == fileToUpload) {
            // entry.file() yields a real File object. FileReader cannot
            // read a filesystem: URL string, which is why the original
            // (readAsText on entry.toURL()) never fired any events.
            entry.file(function (file) {
              getAsText(file);
              // The original appended `entry.toURL` — the function object
              // itself, missing the call parentheses. Append the File.
              formData.append('file', file);
              uploadFile();
            }, errorHandler);
            break;
          }
        }
      }, errorHandler);
    }, errorHandler);
  }

  function errorHandler() {
    console.log('An error occured');
  }

  // Read the File into memory as UTF-16 text.
  function getAsText(readFile) {
    alert ("getting as text :" + readFile);
    var reader = new FileReader();
    reader.readAsText(readFile, "UTF-16");
    // Handle progress, success, and errors
    reader.onprogress = updateProgress;
    reader.onload = loaded;
    reader.onerror = errorHandler;
  }

  // The progress handler was referenced but never defined in the
  // original, which threw a ReferenceError; log progress minimally.
  function updateProgress(evt) {
    if (evt.lengthComputable) {
      console.log('read ' + evt.loaded + ' of ' + evt.total);
    }
  }

  function loaded(evt) {
    // Obtain the read file data
    alert("loaded file");
    var fileString = evt.target.result;
    // Handle UTF-16 file dump
    if (utils.regexp.isChinese(fileString)) {
      //Chinese Characters + Name validation
    } else {
      // run other charset test
    }
    // xhr.send(fileString)
  }

  // Upload only after the file has been located and appended. The
  // original called xhr.send(formData) synchronously, before the async
  // filesystem callbacks had run, so the form data was always empty.
  function uploadFile() {
    var serverurl = "/fileserver/uploadFile?selpath=" + fileToUpload;
    var xhr = new XMLHttpRequest();
    xhr.open('POST', serverurl);
    xhr.onload = function () {
      if (xhr.status === 200) {
        console.log('all done: ' + xhr.status);
      } else {
        console.log('Something went terribly wrong...');
      }
    };
    xhr.send(formData);
  }
});
Now, I am trying to read the file as text (it's bad practice, but I wanted to find some way to make it work), but it doesn't fire any events. Can you please help me find where I am going wrong?