Web Audio API Stream: why isn't dataArray changing?

EDIT 2: solved. See answer below.
EDIT 1:
I changed my code a little: added a gain node and moved a function. I also found that IF I use the microphone, it works. It still doesn't work with the USB audio input. Any ideas? This is my current code:
window.AudioContext = window.AudioContext || window.webkitAudioContext;

window.onload = function() {
    var audioContext = new AudioContext();
    var analyser = audioContext.createAnalyser();
    var gainNode = audioContext.createGain();

    navigator.mediaDevices.getUserMedia({ audio: true, video: false }).then(function(stream) { // MediaStream
        var source = audioContext.createMediaStreamSource(stream);
        source.connect(analyser);
        analyser.connect(gainNode);
        gainNode.connect(audioContext.destination);
        listen();
    });

    function listen() {
        analyser.fftSize = 256;
        var bufferLength = analyser.frequencyBinCount;
        var dataArray = new Uint8Array(bufferLength);
        var index = 0;

        function write() {
            requestAnimationFrame(listen);
            analyser.getByteTimeDomainData(dataArray);
            $('.monitor').html(JSON.stringify(dataArray) + ' -- ' + (index++));
        }

        write();
    }
}
OLD/ORIGINAL POST:
My current code is below. I have a keyboard connected via a USB audio interface, and I'm getting signal (I've already tested it with other programs). So:
window.AudioContext = window.AudioContext || window.webkitAudioContext;

window.onload = function() {
    var audioContext = new AudioContext();
    var analyser = audioContext.createAnalyser();

    navigator.mediaDevices.getUserMedia({ audio: true, video: false }).then(function(stream) { // MediaStream
        var source = audioContext.createMediaStreamSource(stream);
        source.connect(analyser);
        analyser.connect(audioContext.destination);

        analyser.fftSize = 2048;
        var bufferLength = analyser.frequencyBinCount;
        var dataArray = new Uint8Array(bufferLength);

        function listen() {
            requestAnimationFrame(listen);
            analyser.getByteTimeDomainData(dataArray);
            $('.monitor').html(JSON.stringify(dataArray));
        }

        listen();
    });
}
While I'm playing the keyboard, dataArray doesn't change at all. Why? I'm new to these things, so I'm probably doing something wrong...

OK, now it's working. My basic current test code is below. The HTML has nothing but a div.monitor to write into. I'm currently testing on Firefox. My hardware is keyboard > mixer > Behringer UCA222 > computer (USB). I get data when playing the keyboard, and I'm happy now.
There are several differences from the original code, but I think the most important one is that I'm saving the media source globally (window.audiosource). There are other posts here about a related issue, for example Chrome: onaudioprocess stops getting called after a while and HTML5 Microphone capture stops after 5 seconds in Firefox.
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);

var audioContext = new (window.AudioContext || window.webkitAudioContext)();
var analyser = audioContext.createAnalyser();

if (navigator.getUserMedia) {
    navigator.getUserMedia(
        { audio: true },
        function(stream) {
            // Keeping the source on window (instead of a local variable)
            // is what stops it from being garbage-collected.
            window.audiosource = audioContext.createMediaStreamSource(stream);
            audiosource.connect(analyser);
            listen();
        },
        function(err) { console.log('The following gUM error occurred: ' + err); }
    );
}

function listen() {
    requestAnimationFrame(listen);
    analyser.fftSize = 256;
    var bufferLength = analyser.frequencyBinCount;
    var dataArray = new Uint8Array(bufferLength);
    analyser.getByteTimeDomainData(dataArray);
    $('.monitor').html(JSON.stringify(dataArray));
}
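For completeness, the same fix carried over to the promise-based code from the edits above would look roughly like this (an untested sketch; the point is that the stream and the source node stay referenced outside the callback so they cannot be garbage-collected):

window.AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext();
var analyser = audioContext.createAnalyser();

navigator.mediaDevices.getUserMedia({ audio: true, video: false }).then(function(stream) {
    // Keep both the stream and the source node on window so they outlive the callback.
    window.audiostream = stream;
    window.audiosource = audioContext.createMediaStreamSource(stream);
    window.audiosource.connect(analyser);
    listen();
});

function listen() {
    requestAnimationFrame(listen);
    var dataArray = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteTimeDomainData(dataArray);
    $('.monitor').html(JSON.stringify(dataArray));
}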

Related

Error in Apple's TVML documentation? pushPage function doesn't work

UPDATED 6/1/17 with the correct code pasted at the bottom.
I'm working through Apple's TVML guide, section 2: Navigating Between Pages. (https://developer.apple.com/library/content/documentation/TVMLKitJS/Conceptual/TVMLProgrammingGuide/NavigatingBetweenPages.html#//apple_ref/doc/uid/TP40016718-CH9-SW1)
Everything is fine until the last bit (Listing 4-4), which allows you to use the menu button on the remote to return to the previous page. Whenever I try it, my sample app simply won't load:
var baseURL;

function loadingTemplate() {
    var template = '<document><loadingTemplate><activityIndicator><text>Loading</text></activityIndicator></loadingTemplate></document>';
    var templateParser = new DOMParser();
    var parsedTemplate = templateParser.parseFromString(template, "application/xml");
    return parsedTemplate;
}

function getDocument(extension) {
    var templateXHR = new XMLHttpRequest();
    var url = baseURL + extension;
    var loadingScreen = loadingTemplate();
    templateXHR.responseType = "document";
    templateXHR.addEventListener("load", function() { pushPage(templateXHR.responseXML, loadingScreen); }, false);
    templateXHR.open("GET", url, true);
    templateXHR.send();
}

function pushPage(page, loading) {
    var currentDoc = getActiveDocument();
    navigationDocument.replaceDocument(page, loading);
}

App.onLaunch = function(options) {
    baseURL = options.BASEURL;
    var extension = "templates/InitialPage.xml";
    getDocument(extension);
}
What am I missing?
This works:
var baseURL;

function loadingTemplate() {
    var template = '<document><loadingTemplate><activityIndicator><text>Loading</text></activityIndicator></loadingTemplate></document>';
    var templateParser = new DOMParser();
    var parsedTemplate = templateParser.parseFromString(template, "application/xml");
    navigationDocument.pushDocument(parsedTemplate);
    return parsedTemplate;
}

function getDocument(extension) {
    var templateXHR = new XMLHttpRequest();
    var url = baseURL + extension;
    var loadingScreen = loadingTemplate();
    templateXHR.responseType = "document";
    templateXHR.addEventListener("load", function() { pushPage(templateXHR.responseXML, loadingScreen); }, false);
    templateXHR.open("GET", url, true);
    templateXHR.send();
}

function pushPage(page, loading) {
    navigationDocument.replaceDocument(page, loading);
}

App.onLaunch = function(options) {
    baseURL = options.BASEURL;
    var extension = "templates/InitialPage.xml";
    getDocument(extension);
}
Yes, I believe there is a mistake. They should have kept the line
navigationDocument.pushDocument(parsedTemplate);
at the end of the loadingTemplate method.
The idea is to push the loading page, then replace it with the new page.
On a side note, the line
var currentDoc = getActiveDocument();
has no business here. This code was obviously not tested or reviewed.
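Stripped to its core, the intended flow is (a sketch, not Apple's listing verbatim; fetchedDocument stands for the XHR's responseXML):

// 1. Build the loading screen and push it onto the navigation stack
//    (pushDocument happens inside loadingTemplate in the working code).
var loadingScreen = loadingTemplate();

// 2. Once the real page has arrived, replace the loading screen in place,
//    so the menu button still pops back to whatever sits below it.
navigationDocument.replaceDocument(fetchedDocument, loadingScreen);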

Web Audio API, Recorder.js: Recording from sourceBuffer without delay

I am trying to create a Web Audio API-based application. So far I have multiple buffer nodes connected to the destination.
What I'm trying to achieve is to record the resulting output when the user (for example) presses a button. I tried using recorder.js, but as far as I understand it, you have to actually play the graph along while recording.
The following code depicts the issue:
<html>
<body>
    <audio controls autoplay></audio>
    <script type="text/javascript" src="recorder.js"></script>

    <input onclick="startRecording()" type="button" value="start recording" />
    <input onclick="stopRecording()" type="button" value="stop recording and play" />

    <script>
        var context = new webkitAudioContext();
        var request = new XMLHttpRequest();

        var onFail = function(e) {
            console.log('Rejected!', e);
        };

        var onSuccess = function(s) {
            request.open('GET', 'voice.wav', true);
            request.responseType = 'arraybuffer';
            request.onload = function () {
                var undecodedAudio = request.response;
                context.decodeAudioData(undecodedAudio, function (buffer) {
                    // The contents of our mp3 is now an AudioBuffer
                    var sourceBuffer = context.createBufferSource();
                    sourceBuffer.buffer = buffer;
                    sourceBuffer.connect(context.destination);
                    recorder = new Recorder(sourceBuffer);
                    recorder.record();
                    sourceBuffer.start(context.currentTime);
                });
            };
            request.send();
        }

        window.URL = window.URL || window.webkitURL;
        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

        var recorder;
        var audio = document.querySelector('audio');

        function startRecording() {
            if (navigator.getUserMedia) {
                navigator.getUserMedia({audio: true}, onSuccess, onFail);
            } else {
                console.log('navigator.getUserMedia not present');
            }
        }

        function stopRecording() {
            recorder.stop();
            recorder.exportWAV(function(s) {
                audio.src = window.URL.createObjectURL(s);
            });
        }
    </script>
</body>
</html>
What I want is that when the user presses record, the file is converted into the resulting audio without actually being played back.
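One way to get the rendered result without audible playback (not in the code above, just a sketch of the general idea) is to build the same graph inside an OfflineAudioContext and render it; the resulting AudioBuffer can then be encoded to WAV, for example with the exportWAV logic from recorder.js:

// Sketch: render a decoded AudioBuffer offline instead of playing it.
// Assumes `buffer` is the AudioBuffer obtained from decodeAudioData.
var offline = new OfflineAudioContext(buffer.numberOfChannels, buffer.length, buffer.sampleRate);
var src = offline.createBufferSource();
src.buffer = buffer;
src.connect(offline.destination);
src.start(0);

offline.startRendering().then(function(renderedBuffer) {
    // renderedBuffer holds the mixed output; nothing was sent to the speakers.
    console.log('rendered ' + renderedBuffer.duration + ' seconds offline');
});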

Stream video through socket to html5 video tag

Hello, I've been trying to stream a WebM video through a socket.io socket directly to the HTML5 video tag. The client and server code follow below:
Server:
(function() {
    var Alert, Channel, Receiver, Takeover, express, pathLib;
    pathLib = require("path");
    fs = require("fs");
    express = require("express");

    module.exports = function(app, sockets) {
        router = express.Router();

        router.get("/clearAlerts", function(req, res) {
            console.log("reached!");
            return sockets.emit("alert-deleted");
        });

        router.get("/castVideo", function(req, res) {
            // move this to a better place
            console.log("reachedCastVideoss");
            var readStream = fs.createReadStream(pathLib.join(__dirname + "/../../../public/elephants-dream.webm"));
            readStream.addListener('data', function(data) {
                console.log("cast-video emitted");
                sockets.emit('cast-video', data);
            });
        });

        return app.use('/custom/', router);
    };
}).call(this);
Client:
var socket = io.connect('http://localhost:4994');

window.URL = window.URL || window.webkitURL;
window.MediaSource = window.MediaSource || window.WebKitMediaSource;

var mediaSource = new MediaSource();
var video = document.getElementById("video");
var queue = [];
var sourceBuffer;
var firstChunk = true;

video.src = window.URL.createObjectURL(mediaSource);

streamIt = function(e) {
    video.pause();
    mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    mediaSource.sourceBuffers[0].addEventListener('updateend', onBufferUpdated);

    socket.on("cast-video", function(data) {
        console.log("appending to buffer");
        var uIntArray = new Uint8Array(data);
        if (firstChunk) {
            mediaSource.sourceBuffers[0].appendBuffer(uIntArray);
            firstChunk = false;
        }
        queue.push(uIntArray);
        if (queue.length === 33) {
            //mediaSource.endOfStream();
        }
    });

    var onBufferUpdated = function() {
        if (queue.length) {
            mediaSource.sourceBuffers[0].appendBuffer(queue.shift());
        }
    };
};

mediaSource.addEventListener('sourceopen', streamIt);
mediaSource.addEventListener('webkitsourceopen', streamIt);
When I try to run this code, it seems that the first chunk of the stream is appended to the sourceBuffer: I can see the first frame (a title and a URL) of the video file I'm trying to play, but that's it. It seems that only the first appendBuffer call works. I read somewhere about a required initialization segment for the video to play, but I also saw a working example that does not use this initialization segment, so I'm a little confused. (link to the example)
Can anyone clarify whether I really need this initial segment? If I do, how can I retrieve the byte range of this segment? And if I don't need it, what is wrong in my code? Thank you.
Trying a little bit more today, I've found that if I use the same file from http://html5-demos.appspot.com/static/media-source.html, this code actually works. When I try with the files from http://www.webmfiles.org/demo-files, the code does not work. I have no idea why.
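For reference, the more usual MSE pattern is to funnel every chunk through a single queue and only call appendBuffer when the SourceBuffer is idle, instead of appending the first chunk directly (a sketch reusing the names from the client code above; note that the original code also pushes the first chunk into the queue, so that chunk ends up being appended twice):

sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');

sourceBuffer.addEventListener('updateend', function() {
    // The previous append finished; feed the next queued chunk, if any.
    if (queue.length && !sourceBuffer.updating) {
        sourceBuffer.appendBuffer(queue.shift());
    }
});

socket.on('cast-video', function(data) {
    var chunk = new Uint8Array(data);
    if (!sourceBuffer.updating && queue.length === 0) {
        sourceBuffer.appendBuffer(chunk);   // append right away if idle
    } else {
        queue.push(chunk);                  // otherwise wait for updateend
    }
});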

decodeAudioData failing with null errors on continuous stream

In the following code, ffmpeg transcodes the input stream and successfully sends the chunks to the client. On the client side, the client decodes the base64 response from socket.io and converts it to an ArrayBuffer. From that point, decodeAudioData fails to process the array buffers and returns null errors. Does anyone know why decodeAudioData isn't working?
./webaudio_svr.js:
var express = require('/usr/local/lib/node_modules/express');
var http = require('http');
var spawn = require('child_process').spawn;
var util = require('util');
var fs = require('fs');

var app = express();
var webServer = http.createServer(app);
var audServer = http.createServer(app);
var io = require('/usr/local/lib/node_modules/socket.io').listen(webServer, {log: false, });

app.use(express.static(__dirname + '/public'));
app.get('/', function(req, res){
    res.send(
        "<script src='/socket.io/socket.io.js'></script>\n"+
        "<script>var socket=io.connect('http://127.0.0.1:3000');</script>\n"+
        "<script src='/webaudio_cli.js'></script>"
    );
});
webServer.listen(3000);

var inputStream = spawn('/usr/bin/wget', ['-O','-','http://nprdmp.ic.llnwd.net/stream/nprdmp_live01_mp3']);

var ffmpeg = spawn('ffmpeg', [
    '-i', 'pipe:0', // Input on stdin
    '-ar', '44100', // Sampling rate
    '-ac', 2,       // Stereo
    '-f', 'mp3',
    'pipe:1'        // Output on stdout
]);

io.sockets.on('connection', function(webSocket) {
    var disconnect = '0';
    if (disconnect == '0') {
        inputStream.stdout.pipe(ffmpeg.stdin);
        ffmpeg.stdout.on('data', function(data) {
            var data64 = data.toString('base64');
            webSocket.emit('stream', data64);
        });
    }
    webSocket.on('disconnect', function() {
        disconnect = 1;
    });
});
./public/webaudio_cli.js:
function str2ab(str) {
    var buf = new ArrayBuffer(str.length*2); // 2 bytes for each char
    var bufView = new Uint16Array(buf);
    for (var i = 0, strLen = str.length; i < strLen; i++) {
        bufView[i] = str.charCodeAt(i);
    }
    return buf;
}

window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var source = context.createBufferSource();

socket.on('stream', function(data) {
    var data = str2ab(atob(data));
    context.decodeAudioData(data, function(buffer) {
        source.connect(context.destination);
        source.buffer = buffer;
        source.start(0);
    }, function(err) {
        console.log("err(decodeAudioData): " + err);
    });
});
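(Side note on the conversion: str2ab above writes each character code into a 16-bit slot, so the ArrayBuffer handed to decodeAudioData does not contain the original MP3 bytes even when the base64 decode itself succeeds. A byte-per-character copy, which is what the accepted fix below also ends up doing with Uint8Array, would look roughly like this; the helper name is mine.)

function binaryStringToArrayBuffer(str) {
    var buf = new ArrayBuffer(str.length);   // 1 byte per character
    var view = new Uint8Array(buf);
    for (var i = 0; i < str.length; i++) {
        view[i] = str.charCodeAt(i) & 0xff;
    }
    return buf;
}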
If you read the notes section of the original post you'll see that I ended up getting this working with binaryjs, but with Kevin's help I was able to get it to work with socket.io as well. Note that there is still a HUGE issue with choppy playback. If someone could lend some assistance with cleaning the audio up, please do. This solution is really pointless unless the audio works as expected, so I need to figure that out.
The issue has to do with how the browser encodes/decodes your base64 string. Until this is changed you must supply your own functions from https://developer.mozilla.org/en-US/docs/Web/JavaScript/Base64_encoding_and_decoding.
webaudio_svr.js:
var express = require('/usr/local/lib/node_modules/express');
var http = require('http');
var spawn = require('child_process').spawn;
var util = require('util');
var fs = require('fs');

var app = express();
var webServer = http.createServer(app);
var io = require('/usr/local/lib/node_modules/socket.io').listen(webServer, {log: false, });

app.use(express.static(__dirname + '/public'));
app.get('/', function(req, res){
    res.send(
        "<script src='/socket.io/socket.io.js'></script>\n"+
        "<script>var socket=io.connect('http://127.0.0.1:3000');</script>\n"+
        "<script src='/base64.js'></script>\n"+
        "<script src='/webaudio_cli.js'></script>"
    );
});
webServer.listen(3000);

var inputStream = spawn('/usr/bin/wget', ['-O','-','http://nprdmp.ic.llnwd.net/stream/nprdmp_live01_mp3']);

var ffmpeg = spawn('ffmpeg', [
    '-i', 'pipe:0', // Input on stdin
    '-ar', '44100', // Sampling rate
    '-ac', 2,       // Stereo
    '-f', 'mp3',
    'pipe:1'        // Output on stdout
]);

io.sockets.on('connection', function(webSocket) {
    var disconnect = '0';
    if (disconnect == '0') {
        inputStream.stdout.pipe(ffmpeg.stdin);
        ffmpeg.stdout.on('data', function(data) {
            var data64 = data.toString('base64');
            webSocket.emit('stream', data64);
        });
    }
    webSocket.on('disconnect', function() {
        disconnect = 1;
    });
});
public/webaudio_cli.js:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var startTime = context.currentTime;

// buffer to arraybuffer
function toArrayBuffer(buffer) {
    var ab = new ArrayBuffer(buffer.length);
    var view = new Uint8Array(ab);
    for (var i = 0; i < buffer.length; ++i) {
        view[i] = buffer[i];
    }
    return ab;
}

socket.on('stream', function(data) {
    var data = toArrayBuffer(base64DecToArr(data));
    context.decodeAudioData(data, function(buffer) {
        playBuffer(buffer);
    }, function(err) {
        console.log("decodeAudioData err: " + err);
    });
});

function playBuffer(buf) {
    var source = context.createBufferSource();
    source.buffer = buf;
    source.connect(context.destination);
    source.start(startTime);
    startTime = startTime + source.buffer.duration;
}
public/base64.js:
Copy and paste the functions from https://developer.mozilla.org/en-US/docs/Web/JavaScript/Base64_encoding_and_decoding#Solution_.232_.E2.80.93_rewriting_atob()_and_btoa()_using_TypedArrays_and_UTF-8

How to get a media stream object from an HTML5 video element in JavaScript

Hi all,
I'm working on peer-to-peer communication using WebRTC. We have a MediaStream object from getUserMedia, which is given as the input stream to the PeerConnection. Here I need a video stream from a video file selected from the local drive, which is playing in an HTML5 video element.
Is it possible to create a MediaStream object from the video tag?
Thanks,
suri
For now you can't add a media stream from a video tag, but it should be possible in the future, as explained on MDN:
MediaStream objects have a single input and a single output. A MediaStream object generated by getUserMedia() is called local, and has as its source input one of the user's cameras or microphones. A non-local MediaStream may be representing a media element, like <video> or <audio>, a stream originating over the network and obtained via the WebRTC PeerConnection API, or a stream created using the Web Audio API MediaStreamAudioSourceNode.
But you can use the Media Source Extensions API to do what you want: you have to put the local file into a stream and append it to a MediaSource object. You can learn more about MSE here: http://www.w3.org/TR/media-source/
And you can find a demo and source of the method above here
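A rough sketch of that MSE approach (assuming a user-picked WebM file with Vorbis/VP8 tracks; the selectors and variable names here are made up, not taken from the linked demo):

var fileInput = document.querySelector('input[type="file"]');
var video = document.querySelector('video');

fileInput.onchange = function() {
    var file = fileInput.files[0];
    var mediaSource = new MediaSource();
    video.src = URL.createObjectURL(mediaSource);

    mediaSource.addEventListener('sourceopen', function() {
        var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
        var reader = new FileReader();
        reader.onload = function(e) {
            sourceBuffer.addEventListener('updateend', function() {
                if (mediaSource.readyState === 'open') {
                    mediaSource.endOfStream();
                }
            });
            // Append the whole local file into the SourceBuffer.
            sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
        };
        reader.readAsArrayBuffer(file);
    });
};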
2021 update: It is now possible using the MediaRecorder interface: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder
Example from the same page:
if (navigator.mediaDevices) {
    console.log('getUserMedia supported.');

    var constraints = { audio: true };
    var chunks = [];

    navigator.mediaDevices.getUserMedia(constraints)
        .then(function(stream) {
            var mediaRecorder = new MediaRecorder(stream);

            visualize(stream);

            record.onclick = function() {
                mediaRecorder.start();
                console.log(mediaRecorder.state);
                console.log("recorder started");
                record.style.background = "red";
                record.style.color = "black";
            }

            stop.onclick = function() {
                mediaRecorder.stop();
                console.log(mediaRecorder.state);
                console.log("recorder stopped");
                record.style.background = "";
                record.style.color = "";
            }

            mediaRecorder.onstop = function(e) {
                console.log("data available after MediaRecorder.stop() called.");

                var clipName = prompt('Enter a name for your sound clip');

                var clipContainer = document.createElement('article');
                var clipLabel = document.createElement('p');
                var audio = document.createElement('audio');
                var deleteButton = document.createElement('button');

                clipContainer.classList.add('clip');
                audio.setAttribute('controls', '');
                deleteButton.innerHTML = "Delete";
                clipLabel.innerHTML = clipName;

                clipContainer.appendChild(audio);
                clipContainer.appendChild(clipLabel);
                clipContainer.appendChild(deleteButton);
                soundClips.appendChild(clipContainer);

                audio.controls = true;
                var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
                chunks = [];
                var audioURL = URL.createObjectURL(blob);
                audio.src = audioURL;
                console.log("recorder stopped");

                deleteButton.onclick = function(e) {
                    evtTgt = e.target;
                    evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
                }
            }

            mediaRecorder.ondataavailable = function(e) {
                chunks.push(e.data);
            }
        })
        .catch(function(err) {
            console.log('The following error occurred: ' + err);
        })
}
MDN also has a detailed mini tutorial: https://developer.mozilla.org/en-US/docs/Web/API/MediaStream_Recording_API/Recording_a_media_element
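That tutorial boils down to something like the following (a sketch; captureStream() is the standardized name, while older Firefox versions exposed it as mozCaptureStream()):

var video = document.querySelector('video');

// Grab a MediaStream directly from the playing <video> element...
var stream = video.captureStream ? video.captureStream() : video.mozCaptureStream();

// ...which can then be handed to an RTCPeerConnection or, as here, recorded.
var recorder = new MediaRecorder(stream);
var chunks = [];
recorder.ondataavailable = function(e) { chunks.push(e.data); };
recorder.onstop = function() {
    var blob = new Blob(chunks, { type: 'video/webm' });
    console.log('recorded ' + blob.size + ' bytes');
};
recorder.start();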