Using getUserMedia I can capture a video stream from the client's webcam/camera, and using the video tag I can show it in the client's browser. Code:
<video autoplay></video>
<script type="text/javascript">
window.URL = window.URL || window.webkitURL;
// Normalize the vendor-prefixed getUserMedia variants.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

var video = $('video')[0];

var failed = function(e) {
  console.log('Denied!', e);
};

if (navigator.getUserMedia) {
  navigator.getUserMedia({video: true, audio: true}, function(stream) {
    // Show the live camera stream in the <video> element.
    video.src = window.URL.createObjectURL(stream);
  }, failed);
} else {
  console.log('Not supported!');
}
</script>
Now, is it possible to send this video stream to a server, either as a real-time feed or after the user has finished recording and decided to upload?
I found a few examples of:
- sending binary images to the server over a WebSocket
- periodically capturing a frame of the streaming video and sending it as an image (a sketch of this approach follows)
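For the frame-capture approach, here is a minimal sketch, assuming the stream is already playing in the <video> element above and that a WebSocket server is listening at ws://localhost:8080 (a placeholder endpoint, not part of the question):

var ws = new WebSocket('ws://localhost:8080'); // placeholder server
var video = document.querySelector('video');
var canvas = document.createElement('canvas');

// Draw the current video frame onto a canvas, encode it as JPEG,
// and push it over the socket.
function sendFrame() {
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  canvas.getContext('2d').drawImage(video, 0, 0);
  ws.send(canvas.toDataURL('image/jpeg', 0.7)); // base64-encoded frame
}

ws.onopen = function() {
  setInterval(sendFrame, 100); // roughly 10 frames per second
};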
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams. It allows web apps to create a file from a live audio/video session.
<video autoplay></video>
<script language="javascript" type="text/javascript">
function onVideoFail(e) {
  console.log('webcam fail!', e);
}

function hasGetUserMedia() {
  // Note: Opera is unprefixed.
  return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
            navigator.mozGetUserMedia || navigator.msGetUserMedia);
}

if (hasGetUserMedia()) {
  // Good to go!
} else {
  alert('getUserMedia() is not supported in your browser');
}

window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         navigator.msGetUserMedia;

var video = document.querySelector('video');
var streamRecorder;
var webcamstream;

if (navigator.getUserMedia) {
  navigator.getUserMedia({audio: true, video: true}, function(stream) {
    video.src = window.URL.createObjectURL(stream);
    webcamstream = stream;
  }, onVideoFail);
} else {
  alert('failed');
}

// record()/getRecordedData() follow the draft MediaStream Recording spec
// linked below; browsers have not implemented them yet.
function startRecording() {
  streamRecorder = webcamstream.record();
  setTimeout(stopRecording, 10000); // stop automatically after 10 seconds
}

function stopRecording() {
  streamRecorder.getRecordedData(postVideoToServer);
}

function postVideoToServer(videoblob) {
  var data = {};
  data.video = videoblob;
  data.metadata = 'test metadata';
  data.action = "upload_video";
  jQuery.post("http://www.kongraju.in/uploadvideo.php", data, onUploadSuccess);
}

function onUploadSuccess() {
  alert('video uploaded');
}
</script>

<div id="webcamcontrols">
  <button class="recordbutton" onclick="startRecording();">RECORD</button>
</div>
Spec:
http://www.w3.org/TR/mediastream-recording/
You can send the recorded file to the server.
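In modern browsers the standardized replacement for this draft record() API is MediaRecorder. A minimal sketch of recording for ten seconds and uploading the result; the /uploadvideo endpoint is a placeholder, not part of the original answer:

var recorder;
var chunks = [];

function startRecording(stream) {
  recorder = new MediaRecorder(stream);
  recorder.ondataavailable = function(e) {
    chunks.push(e.data); // collect encoded media chunks
  };
  recorder.onstop = function() {
    var blob = new Blob(chunks, { type: 'video/webm' });
    var form = new FormData();
    form.append('video', blob, 'recording.webm');
    fetch('/uploadvideo', { method: 'POST', body: form }); // placeholder URL
  };
  recorder.start();
  setTimeout(function() { recorder.stop(); }, 10000); // stop after 10 seconds
}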
Take a look at this article: http://www.smartjava.org/content/face-detection-using-html5-javascript-webrtc-websockets-jetty-and-javacvopencv
It shows a use of WebRTC:
These APIs should enable building applications that can be run inside a browser, requiring no extra downloads or plugins, that allow communication between parties using audio, video and supplementary real-time communication, without having to use intervening servers (unless needed for firewall traversal, or for providing intermediary services).
Related
I am trying to create a Web Audio API based application. So far I have multiple buffer nodes connected to the destination.
What I'm trying to achieve is to be able to record the resulting output when the user (for example) presses a button. I tried using recorder.js, but as far as I understand it, you need to play the graph while recording.
The following code illustrates the issue:
<html>
<body>
<audio controls autoplay></audio>
<script type="text/javascript" src="recorder.js"></script>

<input onclick="startRecording()" type="button" value="start recording" />
<input onclick="stopRecording()" type="button" value="stop recording and play" />

<script>
var context = new webkitAudioContext();
var request = new XMLHttpRequest();

var onFail = function(e) {
  console.log('Rejected!', e);
};

var onSuccess = function(s) {
  request.open('GET', 'voice.wav', true);
  request.responseType = 'arraybuffer';
  request.onload = function () {
    var undecodedAudio = request.response;
    context.decodeAudioData(undecodedAudio, function (buffer) {
      // The contents of the wav file is now an AudioBuffer
      var sourceBuffer = context.createBufferSource();
      sourceBuffer.buffer = buffer;
      sourceBuffer.connect(context.destination);
      recorder = new Recorder(sourceBuffer);
      recorder.record();
      sourceBuffer.start(context.currentTime);
    });
  };
  request.send();
};

window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

var recorder;
var audio = document.querySelector('audio');

function startRecording() {
  if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true}, onSuccess, onFail);
  } else {
    console.log('navigator.getUserMedia not present');
  }
}

function stopRecording() {
  recorder.stop();
  recorder.exportWAV(function(s) {
    audio.src = window.URL.createObjectURL(s);
  });
}
</script>
</body>
</html>
What I want is that when the user presses record, the graph is rendered to the resulting audio file without actually being played back.
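For reference, one way to render a Web Audio graph without audible playback is OfflineAudioContext, which processes the graph faster than real time. A minimal sketch; decodedBuffer stands in for an AudioBuffer you have already decoded, and the channel count, length, and sample rate are example values:

var offline = new OfflineAudioContext(2, 44100 * 10, 44100); // stereo, 10 s at 44.1 kHz

var source = offline.createBufferSource();
source.buffer = decodedBuffer; // an AudioBuffer decoded earlier (placeholder)
source.connect(offline.destination);
source.start(0);

offline.startRendering().then(function(renderedBuffer) {
  // renderedBuffer holds the mixed-down result without anything having
  // been played; encode it (e.g. to WAV) for upload or download.
  console.log('Rendered', renderedBuffer.duration, 'seconds of audio');
});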
I am trying to share my screen. I have used the HTML video tag to stream video, and then I am using the play option of this video. Now I want to save the video on my computer. How can I achieve this? For reference, the code is below:
var video = document.getElementById("video");

// Test browser support
window.navigator = window.navigator || {};
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         null;

if (navigator.getUserMedia === null) {
  // document.getElementById('gum-unsupported').classList.remove('hidden');
  document.getElementById('videorecorderplay-button-recorder').setAttribute('disabled', 'disabled');
  document.getElementById('videorecorderstop-button-recorder').setAttribute('disabled', 'disabled');
} else {
  // Opera <= 12.16 accepts the direct stream.
  // More on this here: http://dev.opera.com/articles/view/playing-with-html5-video-and-getusermedia-support/
  var createSrc = window.URL ? window.URL.createObjectURL : function(stream) { return stream; };

  // Opera <= 12.16 supports video only.
  var audioContext = window.AudioContext ||
                     window.webkitAudioContext ||
                     null;
  if (audioContext === null) {
    document.getElementById('gum-partially-supported').classList.remove('hidden');
  }

  // document.getElementById('videorecorderplay-button-recorder').addEventListener('click', function () {
  var constraints = {
    video: {
      mandatory: {
        minWidth: 1280,
        minHeight: 720,
        minFrameRate: 30
      },
      optional: [
        { minFrameRate: 60 }
      ],
      mediaSource: "screen" // Firefox-specific screen-capture source
    }
  };

  // Capture the user's screen as a video stream (Firefox-prefixed API)
  navigator.mozGetUserMedia(constraints, function(stream) {
    console.log("Received local stream");
    videoStream = stream;
    video.src = createSrc(stream);
    video.play();
  },
  function(error) {
    console.log("Video capture error: ", error.code);
  });
}
Once I call video.play(), the screen sharing starts. Now I want to save it. How can I do this?
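One way to save it (a sketch, assuming the browser supports MediaRecorder for the captured stream) is to record the stream and offer the result as a local download:

var recorder = new MediaRecorder(videoStream);
var chunks = [];

recorder.ondataavailable = function(e) {
  chunks.push(e.data); // encoded media arrives in chunks
};

recorder.onstop = function() {
  var blob = new Blob(chunks, { type: 'video/webm' });
  var a = document.createElement('a');
  a.href = window.URL.createObjectURL(blob);
  a.download = 'screen-recording.webm'; // arbitrary file name
  a.click();
  window.URL.revokeObjectURL(a.href);
};

recorder.start();
// Later, e.g. from a stop button: recorder.stop();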
I'm trying to use WebRTC to upload a video to an ASP page.
Here is the page load script:
window.navigator = window.navigator || {};
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         null;

if (navigator.getUserMedia === null) {
  // Unsupported.
} else {
  document.getElementById('button-play-gum').addEventListener('click', function() {
    // Capture user's audio and video source
    navigator.getUserMedia({
      video: true,
      audio: true
    },
    function(stream) {
      videoStream = stream;
      // Stream the data
      video.src = createSrc(stream);
      video.play();
    },
    function(error) {
      console.log("Video capture error: ", error.code);
    });
  });
}
This works fine, and I can see my camera input on screen.
Now I created a button which calls the following line:
streamRecorder = videoStream.record();
I get this error:
videoStream.record is not a function
Any ideas about this?
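The likely cause: MediaStream.record() comes from an early draft and was never implemented in browsers, so the method simply does not exist on the stream object. The standardized replacement is MediaRecorder; a minimal sketch of the substitution:

// MediaStream has no record() method; use MediaRecorder instead.
var streamRecorder = new MediaRecorder(videoStream);
streamRecorder.ondataavailable = function(e) {
  // e.data is a Blob containing the encoded media
};
streamRecorder.start();
// ...later:
streamRecorder.stop();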
I am creating an application to record audio and video, but it works only in Google Chrome or Canary. I want it to work in Internet Explorer and Mozilla Firefox as well. This is my code.
Can we do it without using getUserMedia? If yes, please tell me.
Please refer to this link:
http://davidwalsh.name/demo/camera.php
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams. It allows web apps to create a file from a live audio/video session.
MediaStreamRecorder is currently unimplemented; you will have to wait for it to be implemented in all browsers.
(The example code and spec link are the same as in the earlier answer above.)
I was playing around with the new HTML5 specifications, specifically the webcam functionality, by following this tutorial. I was getting the following error:
Native web camera streaming (getUserMedia) is not supported in this browser.
which is produced by this if statement:
if (navigator.getUserMedia)
Now, I am sure that navigator.getUserMedia is enabled in my browser, as these examples here work perfectly.
So I modified the code in the if statement to the following:
if (navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia)
But now I am getting a JavaScript error:
Uncaught TypeError: Object #<Navigator> has no method 'getUserMedia'
at this line here:
navigator.getUserMedia('video', successCallback, errorCallback);
which doesn't really make sense! It IS working in the last link I posted!
Any ideas?
Thanks in advance.
If you're testing for navigator.getUserMedia, navigator.webkitGetUserMedia, navigator.mozGetUserMedia, and navigator.msGetUserMedia, then you have no guarantee that navigator.getUserMedia() itself is available; it could be any one of the four. You could try something like this (from getUserMedia.js):
navigator.getUserMedia_ = (navigator.getUserMedia ||
                           navigator.webkitGetUserMedia ||
                           navigator.mozGetUserMedia ||
                           navigator.msGetUserMedia);

if (!!navigator.getUserMedia_) {
  navigator.getUserMedia_('video', successCallback, errorCallback);
  // The rest of your code
}
navigator.getUserMedia() is deprecated. See MDN.
https://developer.mozilla.org/en-US/docs/Web/API/Navigator/getUserMedia
Use navigator.mediaDevices.getUserMedia(constraints) instead:
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
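A minimal sketch of the promise-based replacement, assuming a <video> element on the page:

// Modern, promise-based capture; no vendor prefixes needed.
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
  .then(function(stream) {
    var video = document.querySelector('video');
    video.srcObject = stream; // preferred over createObjectURL(stream)
    video.play();
  })
  .catch(function(err) {
    console.log('getUserMedia error:', err.name, err.message);
  });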
This is a new technology. You must have an up-to-date Firefox/Chrome/Opera browser. Then try this:
function showCamera() {
  var streaming = false,
      video = window.content.document.createElement("video"),
      cover = window.content.document.createElement("div"),
      canvas = window.content.document.createElement("canvas"),
      photo = window.content.document.createElement("img"),
      startbutton = window.content.document.createElement("button"),
      width = 320,
      height = 0;

  photo.src = "http://placekitten.com/g/320/261";
  window.content.document.body.insertBefore(video, window.content.document.body.firstChild);

  var navigator = window.navigator;
  navigator.getMedia = (navigator.getUserMedia ||
                        navigator.webkitGetUserMedia ||
                        navigator.mozGetUserMedia ||
                        navigator.msGetUserMedia);

  navigator.getMedia(
    {
      video: true,
      audio: false
    },
    function(stream) {
      if (navigator.mozGetUserMedia) {
        video.mozSrcObject = stream;
      } else {
        var vendorURL = window.URL || window.webkitURL;
        video.src = vendorURL.createObjectURL(stream);
      }
      video.play();
    },
    function(err) {
      console.log("An error occurred! " + err);
    }
  );

  video.addEventListener('canplay', function(ev) {
    if (!streaming) {
      height = video.videoHeight / (video.videoWidth / width);
      video.setAttribute('width', width);
      video.setAttribute('height', height);
      canvas.setAttribute('width', width);
      canvas.setAttribute('height', height);
      streaming = true;
    }
  }, false);

  function takepicture() {
    canvas.width = width;
    canvas.height = height;
    canvas.getContext('2d').drawImage(video, 0, 0, width, height);
    var data = canvas.toDataURL('image/png');
    photo.setAttribute('src', data);
  }

  startbutton.addEventListener('click', function(ev) {
    takepicture();
    ev.preventDefault();
  }, false);
}

showCamera();
If your browser is Firefox and it still doesn't work, go to about:config and set/add a boolean preference called media.navigator.enabled with the value true.
Source: https://developer.mozilla.org/en-US/docs/WebRTC/Taking_webcam_photos
P.S.: I used this code in a Greasemonkey script and it worked. I made a few changes to the first few lines of the original code.