How to save a local streaming video in Firefox - HTML

I am trying to share my screen. I am using the HTML video tag to display the stream and then calling play() on that video. Now I want to save the video to my computer. How can I achieve this? For reference, the code is below:
var video = document.getElementById("video");

// Test browser support
window.navigator = window.navigator || {};
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         null;

if (navigator.getUserMedia === null) {
    // document.getElementById('gum-unsupported').classList.remove('hidden');
    document.getElementById('videorecorderplay-button-recorder').setAttribute('disabled', 'disabled');
    document.getElementById('videorecorderstop-button-recorder').setAttribute('disabled', 'disabled');
} else {
    // Opera <= 12.16 accepts the direct stream.
    // More on this here: http://dev.opera.com/articles/view/playing-with-html5-video-and-getusermedia-support/
    var createSrc = window.URL ? window.URL.createObjectURL : function (stream) { return stream; };

    // Opera <= 12.16 supports video only.
    var audioContext = window.AudioContext ||
                       window.webkitAudioContext ||
                       null;
    if (audioContext === null) {
        document.getElementById('gum-partially-supported').classList.remove('hidden');
    }

    //document.getElementById('videorecorderplay-button-recorder').addEventListener('click', function () {
    var constraints = {
        video: {
            mandatory: {
                minWidth: 1280,
                minHeight: 720,
                minFrameRate: 30
            },
            optional: [
                { minFrameRate: 60 }
            ],
            mediaSource: "screen"
        }
    };

    // Capture user's audio and video source
    navigator.mozGetUserMedia(constraints, function (stream) {
        console.log("Received local stream");
        videoStream = stream;
        video.src = createSrc(stream);
        video.play();
    }, function (error) {
        console.log("Video capture error: ", error.code);
    });
}
Once I call video.play(), my screen sharing starts. Now I want to save it. How can I do this?
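A minimal sketch of one way to do this in current Firefox, using the standard MediaRecorder API (which postdates the code above) plus a temporary download link; the filename screen-capture.webm is arbitrary:

// Minimal sketch: record the screen-share stream and save it locally.
// Assumes `stream` is the MediaStream from the getUserMedia success callback above.
var chunks = [];
var recorder = new MediaRecorder(stream);

recorder.ondataavailable = function (event) {
    if (event.data.size > 0) {
        chunks.push(event.data);
    }
};

recorder.onstop = function () {
    var blob = new Blob(chunks, { type: 'video/webm' });
    // Offer the recording as a download; the filename is arbitrary.
    var a = document.createElement('a');
    a.href = URL.createObjectURL(blob);
    a.download = 'screen-capture.webm';
    a.click();
};

recorder.start();
// Call recorder.stop() when the capture should end, e.g. from a stop button.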

Related

Not able to do screen share in Kurento Node.js app

The code below works in Firefox but not in Chrome. I have a Chrome extension for screen sharing; the screen stream is captured and I can attach it to a video element in the browser, but it does not reach the other WebRTC end.
function getScreenConstraints(test, callback) {
    var firefoxScreenConstraints = {
        mozMediaSource: 'window',
        mediaSource: 'window'
    };
    //if (isFirefox) return firefoxScreenConstraints;
    if (isFirefox) return callback(null, firefoxScreenConstraints);

    // this statement defines getUserMedia constraints
    // that will be used to capture content of screen
    var screen_constraints = {
        mandatory: {
            chromeMediaSource: chromeMediaSource,
            maxWidth: screen.width > 1920 ? screen.width : 1920,
            maxHeight: screen.height > 1080 ? screen.height : 1080
        }
    };

    // this statement verifies chrome extension availability
    // if installed and available then it will invoke extension API
    // otherwise it will fall back to the command-line based screen capturing API
    if (chromeMediaSource == 'desktop' && !sourceId) {
        getSourceId(function () {
            screen_constraints.mandatory.chromeMediaSourceId = sourceId;
            /*navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
            navigator.getUserMedia({ video: screen_constraints }, function (stream) {
                var video = document.querySelector('video');
                console.log(video);
                //var video = document.getElementById('videoInput');
                video.src = URL.createObjectURL(stream);
                video.play();
            }, function (error) {
                alert(JSON.stringify(error, null, '\t'));
            });*/
            callback(sourceId == 'PermissionDeniedError' ? sourceId : null, screen_constraints);
        });
        //return;
    }

    // this statement gets 'sourceId' and sets 'chromeMediaSourceId'
    if (chromeMediaSource == 'desktop') {
        screen_constraints.mandatory.chromeMediaSourceId = sourceId;
    }

    console.log(screen_constraints);
    // now invoking native getUserMedia API
    callback(null, screen_constraints);
}
function screen(to, from) {
    if (to == '') {
        window.alert("You must specify the peer name");
        return;
    }
    var constraints = {
        audio: true,
        video: {
            mediaSource: 'window' || 'screen' // note: this expression always evaluates to 'window'
        }
    };
    setCallState(PROCESSING_CALL);
    showSpinner(videoInput, videoOutput);
    var options = {
        localVideo: videoInput, // if you want to see what you are sharing
        onicecandidate: onIceCandidate,
        sendSource: 'screen',
        mediaConstraints: constraints
    };
    webRtcPeer = kurentoUtils.WebRtcPeer.WebRtcPeerSendonly(options, function (error) {
        if (error) return onError(error); // you'll need to use whatever you use for handling errors
        this.generateOffer(function (error, offerSdp) {
            if (error) {
                console.error(error);
                setCallState(NO_CALL);
            }
            $("#callDiv").fadeIn();
            var message = {
                id: 'call',
                from: from,
                to: to,
                sdpOffer: offerSdp,
                type: 'screen'
            };
            sendMessage(message);
        });
    });
}
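As an aside, current Chrome and Firefox expose screen capture through the standard navigator.mediaDevices.getDisplayMedia() API, which needs no extension. A minimal sketch (the element id localVideo is assumed for the example):

// Minimal sketch: standard screen capture without a Chrome extension.
// Assumes a <video id="localVideo"> element exists on the page.
async function shareScreen() {
    try {
        var stream = await navigator.mediaDevices.getDisplayMedia({
            video: true,  // the browser shows its own screen/window picker
            audio: false
        });
        var video = document.getElementById('localVideo');
        video.srcObject = stream;  // srcObject replaces createObjectURL(stream)
        await video.play();
        // The same stream can be fed to a WebRTC peer connection, e.g.
        // pc.addTrack(stream.getVideoTracks()[0], stream);
    } catch (err) {
        console.error('Screen capture failed:', err);
    }
}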

HTML5 video recording and capture - .record is not a function

I'm trying to use WebRTC to upload a video to an ASP page.
Here is the page load script:
window.navigator = window.navigator || {};
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         null;

if (navigator.getUserMedia === null) {
    // Unsupported.
} else {
    document.getElementById('button-play-gum').addEventListener('click', function () {
        // Capture user's audio and video source
        navigator.getUserMedia({
            video: true,
            audio: true
        }, function (stream) {
            videoStream = stream;
            // Stream the data
            video.src = createSrc(stream);
            video.play();
        }, function (error) {
            console.log("Video capture error: ", error.code);
        });
    });
}
This works fine, and I can see my camera input on screen. I then created a button that calls the following line:
streamRecorder = videoStream.record();
I get this error:
videoStream.record is not a function
Any ideas about this?
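MediaStream.record() came from an early draft of the recording spec and was never shipped by browsers, which is why videoStream.record is not a function. A minimal sketch of the standard MediaRecorder API that took its place (videoStream is the stream saved in the getUserMedia callback above):

// Minimal sketch: replace videoStream.record() with MediaRecorder.
var recordedChunks = [];
var recorder = new MediaRecorder(videoStream);

recorder.ondataavailable = function (event) {
    if (event.data.size > 0) {
        recordedChunks.push(event.data);
    }
};

recorder.onstop = function () {
    // Combine the chunks into one Blob ready for upload.
    var blob = new Blob(recordedChunks, { type: 'video/webm' });
    console.log('Recorded', blob.size, 'bytes');
};

recorder.start();                                      // begin recording
setTimeout(function () { recorder.stop(); }, 10000);   // stop after 10 s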

Audio and video recorder in html5 using getusermedia()

I am creating an application to record audio and video, but it works only in Google Chrome or Canary. I want it to work in Internet Explorer and Mozilla Firefox. This is my code.
Can we do it without using getUserMedia? If yes, please tell me.
Please refer to this link: http://davidwalsh.name/demo/camera.php
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams. It allows web apps to create a file from a live audio/video session. MediaStreamRecorder is currently unimplemented; you will have to wait for browsers to implement it.
<video autoplay></video>
<script type="text/javascript">
function onVideoFail(e) {
    console.log('webcam fail!', e);
}

function hasGetUserMedia() {
    // Note: Opera is unprefixed.
    return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
              navigator.mozGetUserMedia || navigator.msGetUserMedia);
}

if (hasGetUserMedia()) {
    // Good to go!
} else {
    alert('getUserMedia() is not supported in your browser');
}

window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia ||
                         navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia ||
                         navigator.msGetUserMedia;

var video = document.querySelector('video');
var streamRecorder;
var webcamstream;

if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true, video: true}, function (stream) {
        video.src = window.URL.createObjectURL(stream);
        webcamstream = stream;
        // streamrecorder = webcamstream.record();
    }, onVideoFail);
} else {
    alert('failed');
}

function startRecording() {
    streamRecorder = webcamstream.record();
    setTimeout(stopRecording, 10000);
}

function stopRecording() {
    streamRecorder.getRecordedData(postVideoToServer);
}

function postVideoToServer(videoblob) {
    var data = {};
    data.video = videoblob;
    data.metadata = 'test metadata';
    data.action = "upload_video";
    jQuery.post("http://www.kongaraju.in/uploadvideo.php", data, onUploadSuccess);
}

function onUploadSuccess() {
    alert('video uploaded');
}
</script>

<div id="webcamcontrols">
    <button class="recordbutton" onclick="startRecording();">RECORD</button>
</div>
http://www.w3.org/TR/mediastream-recording/

HTML5 solution to upload a webcam/camera video stream to server

Using getUserMedia I can capture a video stream from the client's webcam/camera, and using the video tag I can show it in the client's browser. Code:
<video autoplay></video>
<script type="text/javascript">
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia || navigator.msGetUserMedia;

var video = $('video')[0];

var failed = function (e) {
    console.log('Denied!', e);
};

if (navigator.getUserMedia) {
    navigator.getUserMedia({video: true, audio: true}, function (stream) {
        video.src = window.URL.createObjectURL(stream);
    }, failed);
} else {
    console.log('Not supported!');
}
</script>
Now, is it possible to send this video stream to a server, either as a realtime feed or after the user has finished recording and decided to upload?
I found a few examples of:
sending binary images to a server over a WebSocket
periodically capturing a frame of the streaming video and sending it as an image (a sketch of this approach follows)
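As an illustration of the frame-by-frame approach, a minimal sketch; the WebSocket endpoint ws://example.com/frames and the 500 ms interval are placeholders for the example:

// Minimal sketch: periodically grab a frame from the <video> element
// and push it to a server over a WebSocket.
var ws = new WebSocket('ws://example.com/frames'); // placeholder endpoint
var frameCanvas = document.createElement('canvas');

function sendFrame() {
    var video = document.querySelector('video');
    frameCanvas.width = video.videoWidth;
    frameCanvas.height = video.videoHeight;
    frameCanvas.getContext('2d').drawImage(video, 0, 0);
    // toBlob yields binary JPEG data, which WebSocket sends as-is.
    frameCanvas.toBlob(function (blob) {
        if (ws.readyState === WebSocket.OPEN) {
            ws.send(blob);
        }
    }, 'image/jpeg', 0.8);
}

ws.onopen = function () {
    setInterval(sendFrame, 500); // roughly two frames per second
};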
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams. It allows web apps to create a file from a live audio/video session; a full example is in the previous answer.
Spec:
http://www.w3.org/TR/mediastream-recording/
You can then send the recorded file to the server.
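A minimal sketch of one way to do the upload, assuming the recording has already produced a Blob; /uploadvideo is a placeholder endpoint. Note that posting a Blob inside a plain object, as the jQuery.post call in the earlier answer does, will not serialize it correctly, so FormData is used instead:

// Minimal sketch: upload a recorded Blob to the server with FormData.
function postVideoToServer(videoBlob) {
    var form = new FormData();
    form.append('video', videoBlob, 'recording.webm');
    form.append('metadata', 'test metadata');

    var xhr = new XMLHttpRequest();
    xhr.open('POST', '/uploadvideo'); // placeholder endpoint
    xhr.onload = function () {
        if (xhr.status === 200) {
            alert('video uploaded');
        }
    };
    xhr.send(form);
}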
Take a look at this article: http://www.smartjava.org/content/face-detection-using-html5-javascript-webrtc-websockets-jetty-and-javacvopencv
It shows a use of WebRTC:
These APIs should enable building applications that can be run inside a browser, requiring no extra downloads or plugins, that allow communication between parties using audio, video and supplementary real-time communication, without having to use intervening servers (unless needed for firewall traversal, or for providing intermediary services).

navigator.getusermedia

I was playing around with the new HTML5 specifications, specifically the webcam functionality.
While following this tutorial, I got the following error:
Native web camera streaming (getUserMedia) is not supported in this browser.
which comes from this if statement:
if (navigator.getUserMedia)
Now, I am sure that navigator.getUserMedia is enabled in my browser, as these examples here work perfectly.
So I modified the condition in the if statement to the following:
if (navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia)
but now, I am getting a javascript error:
Uncaught TypeError: Object #<Navigator> has no method 'getUserMedia'
at this line here:
navigator.getUserMedia('video', successCallback, errorCallback);
which doesn't really make sense! It IS working on the last link I posted!
Any ideas?
Thanks in advance.
If you're testing for navigator.getUserMedia, navigator.webkitGetUserMedia, navigator.mozGetUserMedia, and navigator.msGetUserMedia, then you have no guarantee that navigator.getUserMedia() itself is available; it could be that or any one of the other three. You could try something like this (from getUserMedia.js):
navigator.getUserMedia_ = (navigator.getUserMedia ||
                           navigator.webkitGetUserMedia ||
                           navigator.mozGetUserMedia ||
                           navigator.msGetUserMedia);

if (!!navigator.getUserMedia_) {
    navigator.getUserMedia_('video', successCallback, errorCallback);
    // The rest of your code
}
navigator.getUserMedia() is deprecated; see MDN:
https://developer.mozilla.org/en-US/docs/Web/API/Navigator/getUserMedia
Use navigator.mediaDevices.getUserMedia(constraints) instead:
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
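A minimal sketch of the promise-based replacement (assuming a <video> element on the page):

// Minimal sketch: the modern promise-based getUserMedia.
navigator.mediaDevices.getUserMedia({ video: true, audio: false })
    .then(function (stream) {
        var video = document.querySelector('video');
        video.srcObject = stream; // srcObject replaces createObjectURL(stream)
        return video.play();
    })
    .catch(function (err) {
        console.error('getUserMedia failed:', err);
    });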
This is a new technology. You must have an up-to-date Firefox/Chrome/Opera browser. Then try this:
function showCamera() {
    var streaming = false,
        video = window.content.document.createElement("video"),
        cover = window.content.document.createElement("div"),
        canvas = window.content.document.createElement("canvas"),
        photo = window.content.document.createElement("img"),
        startbutton = window.content.document.createElement("button"),
        width = 320,
        height = 0;

    photo.src = "http://placekitten.com/g/320/261";
    window.content.document.body.insertBefore(video, window.content.document.body.firstChild);

    var navigator = window.navigator;
    navigator.getMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);

    navigator.getMedia(
        {
            video: true,
            audio: false
        },
        function (stream) {
            if (navigator.mozGetUserMedia) {
                video.mozSrcObject = stream;
            } else {
                var vendorURL = window.URL || window.webkitURL;
                video.src = vendorURL.createObjectURL(stream);
            }
            video.play();
        },
        function (err) {
            console.log("An error occurred! " + err);
        });

    video.addEventListener('canplay', function (ev) {
        if (!streaming) {
            height = video.videoHeight / (video.videoWidth / width);
            video.setAttribute('width', width);
            video.setAttribute('height', height);
            canvas.setAttribute('width', width);
            canvas.setAttribute('height', height);
            streaming = true;
        }
    }, false);

    function takepicture() {
        canvas.width = width;
        canvas.height = height;
        canvas.getContext('2d').drawImage(video, 0, 0, width, height);
        var data = canvas.toDataURL('image/png');
        photo.setAttribute('src', data);
    }

    startbutton.addEventListener('click', function (ev) {
        takepicture();
        ev.preventDefault();
    }, false);
}

showCamera();
If your browser is Firefox and it is still not working, go to about:config and set/add a boolean preference named media.navigator.enabled with the value true.
Source: https://developer.mozilla.org/en-US/docs/WebRTC/Taking_webcam_photos
P.S.: I used this code in a Greasemonkey script and it worked. I made a few changes to the first lines of the original code.