How to get a media stream object from an HTML5 video element in JavaScript - html

Hi all,
I'm working on peer-to-peer communication using WebRTC. We have a MediaStream object from getUserMedia, which is given as the input stream to the PeerConnection. Now I need a video stream from a video file selected from the local drive, which is playing in an HTML5 video element.
Is it possible to create a MediaStream object from the video tag?
thanks,
suri

For now you can't get a media stream from a video tag, but it should be possible in the future, as explained on MDN:
MediaStream objects have a single input and a single output. A MediaStream object generated by getUserMedia() is called local, and has as its source input one of the user's cameras or microphones. A non-local MediaStream may be representing a media element, like <video> or <audio>, a stream originating over the network and obtained via the WebRTC PeerConnection API, or a stream created using the Web Audio API MediaStreamAudioSourceNode.
But you can use the Media Source Extensions API to do what you want: you have to put the local file into a stream and append it to a MediaSource object. You can learn more about MSE here: http://www.w3.org/TR/media-source/
And you can find a demo and source of the method above here
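A minimal sketch of that approach, assuming the local file is picked through an <input type="file"> and is an MSE-compatible WebM; the element ids and the codec string are illustrative, not from the demo above:
var video = document.querySelector('video');
var fileInput = document.querySelector('input[type="file"]'); // hypothetical file picker

fileInput.onchange = function() {
  var file = fileInput.files[0];
  var mediaSource = new MediaSource();
  video.src = URL.createObjectURL(mediaSource);

  mediaSource.addEventListener('sourceopen', function() {
    // The codecs must match the actual file, here assumed to be VP8/Vorbis WebM
    var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    var reader = new FileReader();
    reader.onload = function(e) {
      sourceBuffer.addEventListener('updateend', function() {
        mediaSource.endOfStream();
      });
      sourceBuffer.appendBuffer(e.target.result); // ArrayBuffer of the whole file
    };
    reader.readAsArrayBuffer(file);
  });
};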

2021 update: It is now possible using the MediaRecorder interface: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder
Example from the same page:
if (navigator.mediaDevices) {
  console.log('getUserMedia supported.');

  // record, stop, soundClips and visualize() are defined elsewhere on the MDN demo page
  var constraints = { audio: true };
  var chunks = [];

  navigator.mediaDevices.getUserMedia(constraints)
    .then(function(stream) {
      var mediaRecorder = new MediaRecorder(stream);

      visualize(stream);

      record.onclick = function() {
        mediaRecorder.start();
        console.log(mediaRecorder.state);
        console.log("recorder started");
        record.style.background = "red";
        record.style.color = "black";
      }

      stop.onclick = function() {
        mediaRecorder.stop();
        console.log(mediaRecorder.state);
        console.log("recorder stopped");
        record.style.background = "";
        record.style.color = "";
      }

      mediaRecorder.onstop = function(e) {
        console.log("data available after MediaRecorder.stop() called.");

        var clipName = prompt('Enter a name for your sound clip');

        var clipContainer = document.createElement('article');
        var clipLabel = document.createElement('p');
        var audio = document.createElement('audio');
        var deleteButton = document.createElement('button');

        clipContainer.classList.add('clip');
        audio.setAttribute('controls', '');
        deleteButton.innerHTML = "Delete";
        clipLabel.innerHTML = clipName;

        clipContainer.appendChild(audio);
        clipContainer.appendChild(clipLabel);
        clipContainer.appendChild(deleteButton);
        soundClips.appendChild(clipContainer);

        audio.controls = true;
        var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
        chunks = [];
        var audioURL = URL.createObjectURL(blob);
        audio.src = audioURL;
        console.log("recorder stopped");

        deleteButton.onclick = function(e) {
          evtTgt = e.target;
          evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
        }
      }

      mediaRecorder.ondataavailable = function(e) {
        chunks.push(e.data);
      }
    })
    .catch(function(err) {
      console.log('The following error occurred: ' + err);
    })
}
MDN also has a detailed mini tutorial: https://developer.mozilla.org/en-US/docs/Web/API/MediaStream_Recording_API/Recording_a_media_element
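HTMLMediaElement also exposes captureStream(), which gives exactly the MediaStream the original question asks for. A minimal sketch, assuming a <video> element with id "video" that is already playing the local file (browser support varies; Firefox used the mozCaptureStream prefix):
var video = document.getElementById('video');

// captureStream() returns a MediaStream of whatever the element is currently playing
var stream = video.captureStream ? video.captureStream() : video.mozCaptureStream();

// The stream can then be recorded, or added to an RTCPeerConnection
var recorder = new MediaRecorder(stream);
recorder.ondataavailable = function(e) {
  console.log('got chunk of ' + e.data.size + ' bytes');
};
recorder.start(1000); // emit a chunk roughly every second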

Related

Google chrome sends an ogg file as webm

I'm using MediaStreamRecorder to record an ogg file and send it to my API via WebSocket; the API is not compatible with WebM. It works fine in other browsers, but in Chrome my API says that the file is a WebM and throws an error.
This is part of my recorder.js file:
mediaRecorder.onstop = function(e) {
  if (audioarray.length >= 2) {
    var blob = new Blob(audioarray, { 'type' : 'audio/ogg; codecs=opus' });
    var blobURL = URL.createObjectURL(blob);
    blobsize = blob.size;
    reader.readAsDataURL(blob);
    reader.onload = function(event) {
      codedaudio = reader.result;
      console.log("File size: " + blobsize);
      audiourl = event.target.result;
      // cut Base64 code to the last ","
      audiomsg = codedaudio.substring(codedaudio.lastIndexOf(",") + 1, codedaudio.length);
      console.log("Audio length: " + audioarray.length);
      sendMessage(audiomsg, 'right', 'audio', blobURL);
      console.log("MediaStreamrecorder stopped");
      audioarray = [];
    };
  } else {
    // ToDo in short audio case
    console.log("audio is too short, it will not be sent")
    console.log("Audio length: " + audioarray.length);
    audioarray = [];
  };
}
Here is the part of the code that sends the ogg:
sendMessage = function(text, side, type, filepath) {
  webmessage = {
    message: text,
    type: type
  }
  message_side = side;
  // output debug
  if (text != "") {
    ws.send(JSON.stringify(webmessage));
    if (output == true) {
      console.log(webmessage);
    }
  }
};
Is there any way to send it as ogg on Chrome?
Unlike Firefox and possibly some other browsers, Chrome doesn't support recording audio-only in an ogg container.
You can test the support with:
MediaRecorder.isTypeSupported("audio/ogg")
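A possible workaround (a sketch reusing the stream and audioarray names from the question): pick a container the browser actually supports at runtime, and build the Blob with the type that was really recorded:
// Hypothetical fallback list: try ogg first, then WebM
var candidates = ['audio/ogg; codecs=opus', 'audio/webm; codecs=opus', 'audio/webm'];
var mimeType = candidates.filter(function(t) {
  return MediaRecorder.isTypeSupported(t);
})[0];

var mediaRecorder = new MediaRecorder(stream, { mimeType: mimeType });

// When building the Blob, use the type that was actually recorded,
// so the receiving API isn't told an ogg file is coming when it is really WebM.
var blob = new Blob(audioarray, { type: mediaRecorder.mimeType });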

HTML5 web audio controls

I have a music play example: http://www.smartjava.org/examples/webaudio/example3.html
And I need to show an HTML5 audio player (with controls) for this song. How can I do it?
JavaScript code from the example below:
// create the audio context (chrome only for now)
if (! window.AudioContext) {
  if (! window.webkitAudioContext) {
    alert('no audiocontext found');
  }
  window.AudioContext = window.webkitAudioContext;
}
var context = new AudioContext();
var audioBuffer;
var sourceNode;
var analyser;
var javascriptNode;
// get the context from the canvas to draw on
var ctx = $("#canvas").get()[0].getContext("2d");
// create a gradient for the fill. Note the strange
// offset, since the gradient is calculated based on
// the canvas, not the specific element we draw
var gradient = ctx.createLinearGradient(0,0,0,300);
gradient.addColorStop(1,'#000000');
gradient.addColorStop(0.75,'#ff0000');
gradient.addColorStop(0.25,'#ffff00');
gradient.addColorStop(0,'#ffffff');
// load the sound
setupAudioNodes();
loadSound("http://www.audiotreasure.com/mp3/Bengali/04_john/04_john_04.mp3");
function setupAudioNodes() {
  // setup a javascript node
  javascriptNode = context.createScriptProcessor(2048, 1, 1);
  // connect to destination, else it isn't called
  javascriptNode.connect(context.destination);
  // setup a analyzer
  analyser = context.createAnalyser();
  analyser.smoothingTimeConstant = 0.3;
  analyser.fftSize = 512;
  // create a buffer source node
  sourceNode = context.createBufferSource();
  sourceNode.connect(analyser);
  analyser.connect(javascriptNode);
  sourceNode.connect(context.destination);
}
// load the specified sound
function loadSound(url) {
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  // When loaded decode the data
  request.onload = function() {
    // decode the data
    context.decodeAudioData(request.response, function(buffer) {
      // when the audio is decoded play the sound
      playSound(buffer);
    }, onError);
  }
  request.send();
}
function playSound(buffer) {
  sourceNode.buffer = buffer;
  sourceNode.start(0);
}

// log if an error occurs
function onError(e) {
  console.log(e);
}
// when the javascript node is called
// we use information from the analyzer node
// to draw the volume
javascriptNode.onaudioprocess = function() {
  // get the average for the first channel
  var array = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(array);
  // clear the current state
  ctx.clearRect(0, 0, 1000, 325);
  // set the fill style
  ctx.fillStyle = gradient;
  drawSpectrum(array);
}

function drawSpectrum(array) {
  for (var i = 0; i < (array.length); i++) {
    var value = array[i];
    ctx.fillRect(i * 5, 325 - value, 3, 325);
    // console.log([i,value])
  }
};
I think what you want is to use an audio tag for your source and use createMediaElementSource to pass the audio to webaudio for visualization.
Beware that createMediaElementSource checks for CORS access so you must have appropriate cross-origin access for this to work. (It looks like your audio source doesn't return the appropriate access headers for this to work.)
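A minimal sketch of that approach (the element id, the file URL and the crossOrigin attribute are assumptions for illustration, not from the original example):
var context = new (window.AudioContext || window.webkitAudioContext)();
var audioElement = document.getElementById('player'); // e.g. <audio id="player" controls>
audioElement.crossOrigin = 'anonymous';               // the server must send CORS headers
audioElement.src = 'http://example.com/track.mp3';

// Route the element's output through an analyser for visualization,
// then on to the speakers; the element keeps its native controls.
var source = context.createMediaElementSource(audioElement);
var analyser = context.createAnalyser();
source.connect(analyser);
analyser.connect(context.destination);

audioElement.play();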

Stream video through socket to html5 video tag

Hello, I've been trying to stream a WebM video through a socket.io socket directly to the HTML5 video tag. The client and server code follows below:
Server:
(function() {
  var Alert, Channel, Receiver, Takeover, express, pathLib;
  pathLib = require("path");
  fs = require("fs");
  express = require("express");
  module.exports = function(app, sockets) {
    router = express.Router();
    router.get("/clearAlerts", function(req, res) {
      console.log("reached!");
      return sockets.emit("alert-deleted");
    });
    router.get("/castVideo", function(req, res) {
      // move this to a better place
      console.log("reachedCastVideoss");
      var readStream = fs.createReadStream(pathLib.join(__dirname + "/../../../public/elephants-dream.webm"));
      readStream.addListener('data', function(data) {
        console.log("cast-video emitted");
        sockets.emit('cast-video', data);
      });
    });
    return app.use('/custom/', router);
  };
}).call(this);
Client:
var socket = io.connect('http://localhost:4994');
window.URL = window.URL || window.webkitURL;
window.MediaSource = window.MediaSource || window.WebKitMediaSource;

var mediaSource = new MediaSource();
var video = document.getElementById("video");
var queue = [];
var sourceBuffer;
var firstChunk = true;

video.src = window.URL.createObjectURL(mediaSource);

streamIt = function(e) {
  video.pause();
  mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
  mediaSource.sourceBuffers[0].addEventListener('updateend', onBufferUpdated);

  socket.on("cast-video", function(data) {
    console.log("appending to buffer");
    var uIntArray = new Uint8Array(data);
    if (firstChunk) {
      mediaSource.sourceBuffers[0].appendBuffer(uIntArray);
      firstChunk = false;
    }
    queue.push(uIntArray);
    if (queue.length === 33) {
      //mediaSource.endOfStream();
    }
  });

  var onBufferUpdated = function() {
    if (queue.length) {
      mediaSource.sourceBuffers[0].appendBuffer(queue.shift());
    }
  };
};

mediaSource.addEventListener('sourceopen', streamIt);
mediaSource.addEventListener('webkitsourceopen', streamIt);
When I try to run this code, it seems that the first chunk of the stream is appended to the sourceBuffer; I can see the first frame (title and a URL) of the video file I'm trying to play, but that's it. It seems that only the first call to appendBuffer works. I read somewhere about a required initialization segment for the video to play, but I also saw a working example that does not use this initialization segment, so I'm a little confused (link to the example).
Can anyone clarify if I really need this initial segment? If I do, how can I retrieve the byte range of this segment? Or if I don't need this segment, what is wrong in my code? Thank you.
Trying a little bit more today, I've found that if I use the same file from http://html5-demos.appspot.com/static/media-source.html, this code actually works. When I try with the files from http://www.webmfiles.org/demo-files, the code does not work. I have no idea why.
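For reference, a common pattern for feeding chunks into a SourceBuffer is to queue them and append only when the buffer is idle, which also avoids appending the first chunk twice as the code above does (a sketch reusing the mediaSource and socket names from the question; it does not address whether the WebM files themselves are MSE-compatible):
var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
var queue = [];

socket.on('cast-video', function(data) {
  queue.push(new Uint8Array(data));
  appendNext();
});

sourceBuffer.addEventListener('updateend', appendNext);

function appendNext() {
  // Only one append may be in flight at a time
  if (queue.length && !sourceBuffer.updating) {
    sourceBuffer.appendBuffer(queue.shift());
  }
}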

WebRTC audio heard without <audio> element (RTCMultiConnection)

Audio is being heard even though no audio element seems to be inserted in the DOM.
Scenario:
Create PeerConnection without streams
Add a stream but disable the code that adds MediaElements (audio,video) to DOM
Issue:
After the stream gets across, audio can be heard from headphones (or speakers).
What should happen:
Since I'm not attaching anything to the DOM, I expect no audio to be heard.
Code for replicating the scenario
// <body>
// <script src="https://cdn.webrtc-experiment.com/RTCMultiConnection.js"></script>
// <button id="start">Start!</button>
// </body>
$('#start').click(function() {
  var NO_MEDIA_SESSION = { video: false, audio: false, oneway: true };

  var caller = new RTCMultiConnection('lets-try');
  caller.session = NO_MEDIA_SESSION;
  caller.dontAttachStream = true;
  caller.onstream = function() { console.log("Got stream but not attaching") };

  var receiver = new RTCMultiConnection('lets-try');
  receiver.session = NO_MEDIA_SESSION;
  receiver.dontAttachStream = true;
  receiver.onstream = function() { console.log("Got stream but not attaching") };

  caller.open();
  receiver.connect();

  receiver.onconnected = function() {
    console.log("Connected!");
    caller.addStream({audio: true});
  }
});
I'm interested in how it is possible to hear a MediaStream without there being an audio DOM element.
If any RTCMultiConnection specialists are answering, could you point me to how to avoid the audio stream being made audible? (I want to get the stream and attach it later myself.)
RTCMultiConnection creates the mediaElement on the fly to make sure the onstream event is fired only when the media stream has actually started flowing.
connection.onstream = function(event) {
  event.mediaElement.pause(); // or volume=0
  // or
  event.mediaElement = null;
  // or
  delete event.mediaElement;
};
Updated:
Use the following snippet:
var connection = new RTCMultiConnection();
connection.session = {
  data: true
};

btnOpenRoom.onclick = function() {
  connection.open('roomid');
};

btnJoinRoom.onclick = function() {
  connection.join('roomid');
};

btnAddAudioStream.onclick = function() {
  connection.addStream({
    audio: true
  });
};

btnAddAudioVideoStream.onclick = function() {
  connection.addStream({
    audio: true,
    video: true
  });
};

Live audio via socket.io 1.0

From the socket.io website:
Binary streaming
Starting in 1.0, it's possible to send any blob back and forth: image, audio, video.
I'm now wondering if this could be the solution for something I've been trying to achieve recently.
I'm actually looking for a way to broadcast a live audio stream from one side (A, i.e. mic input) to all clients connected to a website of mine. Is something like this possible? I've been messing with WebRTC (https://www.webrtc-experiment.com/) examples, but I haven't been able to manage the goal for more than a few connected clients.
My idea is that getUserMedia or any other audio source (PCM, whatever) on side A gets chopped into chunks, delivered to the clients, and played back by, for example, an HTML5 audio element. I need to make that stream as close to realtime as possible; Shoutcast/Icecast services weren't fast enough (indeed, they aren't the solution to my problem, even though they're meant to be used this way), and I don't really care about the audio quality. Cross-platform compatibility would be awesome.
Is something like that possible, using socket.io as the way to provide that data to clients?
I would be very grateful for any reference, hint or source that could help me achieve this.
Thanks a lot.
This example shows you how to use the MediaRecorder to upload audio and then forward it using socket.io. This code will only broadcast after you've called mediaRecorder.stop(). You can choose to broadcast inside of ondataavailable; if you do that, you might want to pass a timeslice to mediaRecorder.start() so that it doesn't trigger ondataavailable so often.
This solution isn't truly live, but I think it will help people who come back and find this question.
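For the ondataavailable variant mentioned above, a minimal sketch (assuming the same socket object as the example below; note that with a timeslice the later chunks are generally not standalone playable files, so the receiver has to treat them as one continuous recording):
navigator.mediaDevices.getUserMedia({ audio: true }).then(function(mediaStream) {
  var mediaRecorder = new MediaRecorder(mediaStream);

  mediaRecorder.ondataavailable = function(e) {
    // With a timeslice this fires roughly once per second
    socket.emit('radio', e.data);
  };

  // Passing a timeslice (in ms) makes the recorder emit data periodically
  // instead of only when stop() is called
  mediaRecorder.start(1000);
});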
Client Code
var constraints = { audio: true };

navigator.mediaDevices.getUserMedia(constraints).then(function(mediaStream) {
  var mediaRecorder = new MediaRecorder(mediaStream);

  mediaRecorder.onstart = function(e) {
    this.chunks = [];
  };
  mediaRecorder.ondataavailable = function(e) {
    this.chunks.push(e.data);
  };
  mediaRecorder.onstop = function(e) {
    var blob = new Blob(this.chunks, { 'type' : 'audio/ogg; codecs=opus' });
    socket.emit('radio', blob);
  };

  // Start recording
  mediaRecorder.start();

  // Stop recording after 5 seconds and broadcast it to server
  setTimeout(function() {
    mediaRecorder.stop()
  }, 5000);
});

// When the client receives a voice message it will play the sound
socket.on('voice', function(arrayBuffer) {
  var blob = new Blob([arrayBuffer], { 'type' : 'audio/ogg; codecs=opus' });
  var audio = document.createElement('audio');
  audio.src = window.URL.createObjectURL(blob);
  audio.play();
});
Server Code
socket.on('radio', function(blob) {
  // can choose to broadcast it to whoever you want
  socket.broadcast.emit('voice', blob);
});
In the Client Code you can use setInterval() instead of setTimeout() and then repeatedly call mediaRecorder.stop() and mediaRecorder.start(), so that a blob is emitted continuously every 5 seconds.
setInterval(function() {
  mediaRecorder.stop()
  mediaRecorder.start()
}, 5000);
Client Code
var constraints = { audio: true };

navigator.mediaDevices.getUserMedia(constraints).then(function(mediaStream) {
  var mediaRecorder = new MediaRecorder(mediaStream);

  mediaRecorder.onstart = function(e) {
    this.chunks = [];
  };
  mediaRecorder.ondataavailable = function(e) {
    this.chunks.push(e.data);
  };
  mediaRecorder.onstop = function(e) {
    var blob = new Blob(this.chunks, { 'type' : 'audio/ogg; codecs=opus' });
    socket.emit('radio', blob);
  };

  // Start recording
  mediaRecorder.start();

  // Stop recording after 5 seconds and broadcast it to server
  setInterval(function() {
    mediaRecorder.stop()
    mediaRecorder.start()
  }, 5000);
});

// When the client receives a voice message it will play the sound
socket.on('voice', function(arrayBuffer) {
  var blob = new Blob([arrayBuffer], { 'type' : 'audio/ogg; codecs=opus' });
  var audio = document.createElement('audio');
  audio.src = window.URL.createObjectURL(blob);
  audio.play();
});
Server Code
socket.on('radio', function(blob) {
  // can choose to broadcast it to whoever you want
  socket.broadcast.emit('voice', blob);
});