I'm trying to stream an RTSP live stream through socket.io using ffmpeg (this part works fine), but now I need to take that video from the socket and play it in an HTML5 video tag.
To do this I'm using MediaSource: I receive small pieces of video through the socket and append them to the MediaSource.
This solution plays the video for a few seconds or minutes and then suddenly stops, without throwing any error in the Chrome console.
var socket = io();
var ms = new MediaSource();
var sourceBuffer;
var queue = [];
var video = document.getElementById("video");

video.src = window.URL.createObjectURL(ms);

socket.on('start', function (response) {
    console.log(response);
    socket.emit('streaming', $stateParams.id);
    ms.addEventListener('sourceopen', videoLoad, false);
    ms.addEventListener('sourceclose', videoClosed, false);
});

function videoLoad() {
    sourceBuffer = ms.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    sourceBuffer.addEventListener('update', function () {
        if (queue.length > 0 && !sourceBuffer.updating) {
            console.log(queue.length);
            sourceBuffer.appendBuffer(queue.shift());
        }
    });
    socket.on('data', function (response) {
        var bytes = new Uint8Array(response);
        var blob = new Blob([bytes]);
        console.log(blob.size);
        if (sourceBuffer.updating || queue.length > 0) {
            queue.push(bytes);
        } else {
            sourceBuffer.appendBuffer(bytes);
        }
    });
}

function videoClosed(e) {
    console.log('mediaSource readyState: ' + this.readyState);
}
In chrome://media-internals/ the video player's log shows me the following a couple of times, and then the video stops:
video_buffering_state BUFFERING_HAVE_ENOUGH
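One pattern that often avoids this kind of silent stall (sketch only, reusing the same sourceBuffer, queue and socket variables as above, not a verified fix) is to push every incoming chunk through the queue and drain it on 'updateend', wrapping appendBuffer in a try/catch so a QuotaExceededError is at least visible:

// Sketch only -- inside videoLoad(), after ms.addSourceBuffer(...):
sourceBuffer.addEventListener('updateend', drainQueue);
socket.on('data', function (chunk) {
    queue.push(new Uint8Array(chunk));
    drainQueue();
});

function drainQueue() {
    if (sourceBuffer.updating || queue.length === 0) return;
    try {
        sourceBuffer.appendBuffer(queue.shift());
    } catch (e) {
        // A QuotaExceededError here would otherwise stall playback silently;
        // already-played ranges could be dropped with sourceBuffer.remove().
        console.error('appendBuffer failed:', e);
    }
}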
Related
My requirement is to send audio chunks captured from the microphone to the server over an HTML5 WebSocket connection.
Establish a WebSocket connection with the server. While the user is speaking, capture small chunks of audio and send them to the server. This should continue until the user stops speaking for 10 seconds, then close the WebSocket connection. Open the WebSocket connection again when the user starts speaking.
I know how to open a connection and send the audio for the first time.
I have two buttons. On click of the first button (startRecording) I open the WebSocket connection and start recording the audio.
On click of the second button (stopRecording) I send the audio to the server.
My problem is how to do this without buttons: it has to record until the user pauses and then send the audio to the server.
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia || navigator.msGetUserMedia;

var recorder;
var audio = document.querySelector('audio');
var websocket;
var text;

var onSuccess = function(s) {
    var context = new AudioContext();
    var mediaStreamSource = context.createMediaStreamSource(s);
    recorder = new Recorder(mediaStreamSource);
    recorder.record();
    // audio loopback
    // mediaStreamSource.connect(context.destination);
};

function startRecording() {
    var wsURI = "url";
    websocket = new WebSocket(wsURI);
    websocket.onopen = function(evt) { onOpen(evt); };
    websocket.onclose = function(evt) { onClose(); };
    websocket.onmessage = function(evt) { onMessage(evt); };
    websocket.onerror = function(evt) { onError(evt); };

    function onOpen(evt) {
        var message = {
            'action': 'start',
            //'content-type': 'audio/l16;rate=22050'
            'content-type': 'audio/wav;rate=22050'
        };
        websocket.send(JSON.stringify(message));
    }

    function onMessage(evt) {
        console.log(evt.data);
        //console.log(JSON.parse(evt.data))
    }
}

function onClose() {
    websocket.close();
}

if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true}, onSuccess, onFail);
} else {
    console.log('navigator.getUserMedia not present');
}

function stopRecording() {
    recorder.stop();
    recorder.exportWAV(function(s) {
        //audio.src = window.URL.createObjectURL(s);
        websocket.send(s);
        //websocket.onclose();
    });
}
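To get the "record until the user pauses" behaviour, one rough approach (sketch only; the 0.01 threshold is arbitrary, and reusing context, mediaStreamSource, recorder and stopRecording from the code above is an assumption) is to poll an AnalyserNode for the signal level and stop after roughly 10 seconds below the threshold:

// Sketch: rough silence detection with an AnalyserNode.
var analyser = context.createAnalyser();
analyser.fftSize = 2048;
mediaStreamSource.connect(analyser);

var lastSoundAt = Date.now();
var data = new Uint8Array(analyser.fftSize);

function checkSilence() {
    analyser.getByteTimeDomainData(data);
    var peak = 0;
    for (var i = 0; i < data.length; i++) {
        peak = Math.max(peak, Math.abs(data[i] - 128) / 128);
    }
    if (peak > 0.01) {                              // something audible
        lastSoundAt = Date.now();
    } else if (Date.now() - lastSoundAt > 10000) {  // ~10 s of silence
        stopRecording();   // export + send; the socket could then be closed
        return;            // stop polling until recording is started again
    }
    setTimeout(checkSilence, 200);
}
checkSilence();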
In appCtrl.js, for saving the video file:
$('#save_file').click(function(e) {
    var config = {type: 'saveFile', suggestedName: chosenEntry.name};
    chrome.fileSystem.chooseEntry(config, function(writableEntry) {
        // blobContent is the data URL
        var blob = new Blob([$scope.blobContent], {type: 'video/mp4'});
        $scope.writeFileEntry(writableEntry, blob, function(e) {
            console.log('Write complete :)');
        });
    });
});

$scope.writeFileEntry = function(writableEntry, opt_blob, callback) {
    if (!writableEntry) {
        console.log('Nothing selected.');
        return;
    }
    writableEntry.createWriter(function(writer) {
        writer.onerror = $scope.errorHandler;
        writer.onwriteend = callback;
        // If we have data, write it to the file. Otherwise, just use the file we
        // loaded.
        if (opt_blob) {
            writer.truncate(opt_blob.size);
            $scope.waitForIO(writer, function() {
                writer.seek(0);
                writer.write(opt_blob);
            });
        } else {
            chosenEntry.file(function(file) {
                writer.truncate(file.fileSize);
                waitForIO(writer, function() {
                    writer.seek(0);
                    writer.write(file);
                });
            });
        }
    }, $scope.errorHandler);
};

$scope.waitForIO = function(writer, callback) {
    // set a watchdog to avoid eventual locking:
    var start = Date.now();
    // wait for a few seconds
    var reentrant = function() {
        if (writer.readyState === writer.WRITING && Date.now() - start < 4000) {
            setTimeout(reentrant, 100);
            return;
        }
        if (writer.readyState === writer.WRITING) {
            console.error("Write operation taking too long, aborting!" +
                " (current writer readyState is " + writer.readyState + ")");
            writer.abort();
        } else {
            callback();
        }
    };
    setTimeout(reentrant, 100);
};
With the above code the video file is saved, but when I try to play the saved file in Windows Media Player or VLC, it prompts: "Windows Media Player cannot play the file. The player might not support the file type or might not support the codec that was used to compress the file."
Can you please guide me on where I am going wrong? This is my first Chrome app.
Thanks in advance.
Change the method that stores the blob to something like this:
function dataURItoBlob(dataURI, callback) {
    // convert base64 to raw binary data held in a string
    // doesn't handle URLEncoded DataURIs
    var byteString = atob(dataURI.split(',')[1]);

    // separate out the mime component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];

    // write the bytes of the string to an ArrayBuffer
    var ab = new ArrayBuffer(byteString.length);
    var ia = new Uint8Array(ab);
    for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
    }

    return new Blob([ab], {type: 'video/mp4'});
}
To handle the click:
$('#save_file').click(function(e) {
    var config = {type: 'saveFile', suggestedName: chosenEntry.name};
    chrome.fileSystem.chooseEntry(config, function(writableEntry) {
        var blob = dataURItoBlob($scope.blobContent);
        $scope.writeFileEntry(writableEntry, blob, function(e) {
            console.log('Write complete :)');
        });
    });
});
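The original write produced an unplayable file because $scope.blobContent is a data URL, so the bytes written were base64 text rather than raw MP4 data. For reference, a minimal usage sketch of the helper above (the data URL below is just a placeholder):

// Placeholder data URL purely for illustration; the real one comes
// from $scope.blobContent.
var dataURI = 'data:video/mp4;base64,AAAAGGZ0eXA=';
var blob = dataURItoBlob(dataURI);
console.log(blob.size, blob.type);   // size in bytes, 'video/mp4'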
I have a music playback example: http://www.smartjava.org/examples/webaudio/example3.html
And I need to show an HTML5 audio player (with controls) for this song. How can I do it?
The JavaScript code from the example is below:
// create the audio context (chrome only for now)
if (!window.AudioContext) {
    if (!window.webkitAudioContext) {
        alert('no audiocontext found');
    }
    window.AudioContext = window.webkitAudioContext;
}

var context = new AudioContext();
var audioBuffer;
var sourceNode;
var analyser;
var javascriptNode;

// get the context from the canvas to draw on
var ctx = $("#canvas").get()[0].getContext("2d");

// create a gradient for the fill. Note the strange
// offset, since the gradient is calculated based on
// the canvas, not the specific element we draw
var gradient = ctx.createLinearGradient(0, 0, 0, 300);
gradient.addColorStop(1, '#000000');
gradient.addColorStop(0.75, '#ff0000');
gradient.addColorStop(0.25, '#ffff00');
gradient.addColorStop(0, '#ffffff');

// load the sound
setupAudioNodes();
loadSound("http://www.audiotreasure.com/mp3/Bengali/04_john/04_john_04.mp3");

function setupAudioNodes() {
    // setup a javascript node
    javascriptNode = context.createScriptProcessor(2048, 1, 1);
    // connect to destination, else it isn't called
    javascriptNode.connect(context.destination);

    // setup a analyzer
    analyser = context.createAnalyser();
    analyser.smoothingTimeConstant = 0.3;
    analyser.fftSize = 512;

    // create a buffer source node
    sourceNode = context.createBufferSource();
    sourceNode.connect(analyser);
    analyser.connect(javascriptNode);
    sourceNode.connect(context.destination);
}

// load the specified sound
function loadSound(url) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';

    // When loaded decode the data
    request.onload = function() {
        // decode the data
        context.decodeAudioData(request.response, function(buffer) {
            // when the audio is decoded play the sound
            playSound(buffer);
        }, onError);
    };
    request.send();
}

function playSound(buffer) {
    sourceNode.buffer = buffer;
    sourceNode.start(0);
}

// log if an error occurs
function onError(e) {
    console.log(e);
}

// when the javascript node is called
// we use information from the analyzer node
// to draw the volume
javascriptNode.onaudioprocess = function() {
    // get the average for the first channel
    var array = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(array);

    // clear the current state
    ctx.clearRect(0, 0, 1000, 325);

    // set the fill style
    ctx.fillStyle = gradient;
    drawSpectrum(array);
};

function drawSpectrum(array) {
    for (var i = 0; i < array.length; i++) {
        var value = array[i];
        ctx.fillRect(i * 5, 325 - value, 3, 325);
        // console.log([i,value])
    }
}
I think what you want is to use an audio tag for your source and use createMediaElementSource to pass the audio to webaudio for visualization.
Beware that createMediaElementSource checks for CORS access so you must have appropriate cross-origin access for this to work. (It looks like your audio source doesn't return the appropriate access headers for this to work.)
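A rough sketch of that approach (assuming an <audio id="player"> element on the page and a server that actually sends CORS headers; context, analyser and javascriptNode are reused from the question, and this replaces the createBufferSource/loadSound path):

// Sketch: feed an <audio controls> element into Web Audio for analysis.
var audioElement = document.getElementById('player');   // assumed element id
audioElement.crossOrigin = 'anonymous';                  // needs CORS headers on the server
audioElement.src = 'http://www.audiotreasure.com/mp3/Bengali/04_john/04_john_04.mp3';
audioElement.controls = true;                            // native player controls

var source = context.createMediaElementSource(audioElement);
source.connect(analyser);            // reuse the analyser from the question
analyser.connect(javascriptNode);    // keep driving the visualisation
source.connect(context.destination); // audio is routed through Web Audio now

audioElement.play();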
Hello, I've been trying to stream a webm video through a socket.io socket directly to the HTML5 video tag. The client and server code follows below:
Server:
(function() {
    var Alert, Channel, Receiver, Takeover, express, pathLib;
    pathLib = require("path");
    fs = require("fs");
    express = require("express");

    module.exports = function(app, sockets) {
        router = express.Router();

        router.get("/clearAlerts", function(req, res) {
            console.log("reached!");
            return sockets.emit("alert-deleted");
        });

        router.get("/castVideo", function(req, res) {
            // move this to a better place
            console.log("reachedCastVideoss");
            var readStream = fs.createReadStream(pathLib.join(__dirname + "/../../../public/elephants-dream.webm"));
            readStream.addListener('data', function(data) {
                console.log("cast-video emitted");
                sockets.emit('cast-video', data);
            });
        });

        return app.use('/custom/', router);
    };
}).call(this);
Client:
var socket = io.connect('http://localhost:4994');

window.URL = window.URL || window.webkitURL;
window.MediaSource = window.MediaSource || window.WebKitMediaSource;

var mediaSource = new MediaSource();
var video = document.getElementById("video");
var queue = [];
var sourceBuffer;
var firstChunk = true;

video.src = window.URL.createObjectURL(mediaSource);

streamIt = function(e) {
    video.pause();
    mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    mediaSource.sourceBuffers[0].addEventListener('updateend', onBufferUpdated);

    socket.on("cast-video", function(data) {
        console.log("appending to buffer");
        var uIntArray = new Uint8Array(data);
        if (firstChunk) {
            mediaSource.sourceBuffers[0].appendBuffer(uIntArray);
            firstChunk = false;
        }
        queue.push(uIntArray);
        if (queue.length === 33) {
            //mediaSource.endOfStream();
        }
    });

    var onBufferUpdated = function() {
        if (queue.length) {
            mediaSource.sourceBuffers[0].appendBuffer(queue.shift());
        }
    };
};

mediaSource.addEventListener('sourceopen', streamIt);
mediaSource.addEventListener('webkitsourceopen', streamIt);
When I try to run this code, it seems that the first chunk of the stream is appended to the sourceBuffer: I can see the first frame (a title and a URL) of the video file I'm trying to play, but that's it. It seems that only the first call to appendBuffer works. I read somewhere about a required initialization segment for the video to play, but I also saw a working example that does not use this initialization segment, so I'm a little confused (link to the example).
Can anyone clarify whether I really need this initial segment? If I do, how can I retrieve the byte range of this segment? And if I don't need it, what is wrong in my code? Thank you.
Trying a little more today, I've found that if I use the same file as http://html5-demos.appspot.com/static/media-source.html, this code actually works. When I try with the files from http://www.webmfiles.org/demo-files, the code does not work. I have no idea why.
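On the byte-range question above: in WebM the initialization segment is everything from the start of the file up to the first Cluster element, whose Matroska element ID is the byte sequence 0x1F 0x43 0xB6 0x75. A rough sketch for locating that boundary, assuming the whole header arrives inside the first chunk:

// Sketch: find where the WebM init segment ends in a chunk of bytes.
function findFirstClusterOffset(bytes) {
    for (var i = 0; i + 3 < bytes.length; i++) {
        if (bytes[i] === 0x1F && bytes[i + 1] === 0x43 &&
            bytes[i + 2] === 0xB6 && bytes[i + 3] === 0x75) {
            return i;   // init segment would be bytes.subarray(0, i)
        }
    }
    return -1;          // no Cluster found in this chunk
}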
Hi all,
I'm doing peer-to-peer communication using WebRTC. We have a MediaStream object from getUserMedia, which is given as the input stream to the peer connection. Here I need a video stream from a video file selected from the local drive, which is playing in an HTML5 video element.
Is it possible to create a MediaStream object from the video tag?
Thanks,
suri
For now you can't get a media stream from a video tag, but it should be possible in the future, as explained on MDN:
MediaStream objects have a single input and a single output. A MediaStream object generated by getUserMedia() is called local, and has as its source input one of the user's cameras or microphones. A non-local MediaStream may be representing a media element, like <video> or <audio>, a stream originating over the network and obtained via the WebRTC PeerConnection API, or a stream created using the Web Audio API MediaStreamAudioSourceNode.
But you can use the Media Source Extensions API to do what you want: you have to put the local file into a stream and append it to a MediaSource object. You can learn more about MSE here: http://www.w3.org/TR/media-source/
And you can find a demo and source of the method above here
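A minimal sketch of that MSE idea, assuming a .webm file chosen through a hypothetical <input type="file" id="file">, a <video id="video"> element, and a codec string that matches the actual file:

// Sketch: play a locally selected .webm file through Media Source Extensions.
var video = document.getElementById('video');            // assumed element
var mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function () {
    var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    var file = document.getElementById('file').files[0]; // assumed file input
    var reader = new FileReader();
    reader.onload = function () {
        sourceBuffer.addEventListener('updateend', function () {
            if (mediaSource.readyState === 'open') {
                mediaSource.endOfStream();                // whole file appended
            }
        });
        sourceBuffer.appendBuffer(new Uint8Array(reader.result));
    };
    reader.readAsArrayBuffer(file);
});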
2021 update: It is now possible using the MediaRecorder interface: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder
Example from same page:
if (navigator.mediaDevices) {
    console.log('getUserMedia supported.');

    var constraints = { audio: true };
    var chunks = [];

    navigator.mediaDevices.getUserMedia(constraints)
        .then(function(stream) {
            var mediaRecorder = new MediaRecorder(stream);

            visualize(stream);

            record.onclick = function() {
                mediaRecorder.start();
                console.log(mediaRecorder.state);
                console.log("recorder started");
                record.style.background = "red";
                record.style.color = "black";
            }

            stop.onclick = function() {
                mediaRecorder.stop();
                console.log(mediaRecorder.state);
                console.log("recorder stopped");
                record.style.background = "";
                record.style.color = "";
            }

            mediaRecorder.onstop = function(e) {
                console.log("data available after MediaRecorder.stop() called.");

                var clipName = prompt('Enter a name for your sound clip');

                var clipContainer = document.createElement('article');
                var clipLabel = document.createElement('p');
                var audio = document.createElement('audio');
                var deleteButton = document.createElement('button');

                clipContainer.classList.add('clip');
                audio.setAttribute('controls', '');
                deleteButton.innerHTML = "Delete";
                clipLabel.innerHTML = clipName;

                clipContainer.appendChild(audio);
                clipContainer.appendChild(clipLabel);
                clipContainer.appendChild(deleteButton);
                soundClips.appendChild(clipContainer);

                audio.controls = true;
                var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
                chunks = [];
                var audioURL = URL.createObjectURL(blob);
                audio.src = audioURL;
                console.log("recorder stopped");

                deleteButton.onclick = function(e) {
                    evtTgt = e.target;
                    evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
                }
            }

            mediaRecorder.ondataavailable = function(e) {
                chunks.push(e.data);
            }
        })
        .catch(function(err) {
            console.log('The following error occurred: ' + err);
        })
}
MDN also has a detailed mini tutorial: https://developer.mozilla.org/en-US/docs/Web/API/MediaStream_Recording_API/Recording_a_media_element