Chrome Web Audio doesn't play my audio - google-chrome

I have what I think is the most basic possible implementation of Chrome's Web Audio.
I'm on Chrome 13 with Web Audio enabled, but my sound just doesn't play. I can see that it gets loaded (http://pieterhordijk.com/sandbox/html5-audio-api/webkit-audiocontext-interface), but it never plays.
window.onload = init;

var context;
var source;

function loadSample(url) {
    var request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";
    request.onload = function() {
        // createBuffer(ArrayBuffer, mixToMono), looping and noteOn() are the
        // original Chrome 13-era Web Audio API (all deprecated since)
        source.buffer = context.createBuffer(request.response, false);
        source.looping = true;
        source.noteOn(0);
    };
    request.send();
}

function init() {
    context = new webkitAudioContext();
    source = context.createBufferSource();
    loadSample("/sandbox/test.oga");
}

OK, I found the answer myself: I had to connect the source to the output.
source.connect(context.destination);
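For anyone landing here later, a sketch of the same loader using the current, unprefixed API names (decodeAudioData, loop and start instead of the deprecated createBuffer/looping/noteOn), with the missing connect call in place; the URL is the asker's test file:

var context = new (window.AudioContext || window.webkitAudioContext)();

function loadSample(url) {
    var request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";
    request.onload = function() {
        // decodeAudioData is asynchronous, unlike the old sync createBuffer()
        context.decodeAudioData(request.response, function(buffer) {
            var source = context.createBufferSource();
            source.buffer = buffer;
            source.loop = true;                   // replaces source.looping
            source.connect(context.destination);  // the missing link
            source.start(0);                      // replaces noteOn(0)
        });
    };
    request.send();
}

loadSample("/sandbox/test.oga");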

Related

How to release memory using Web Audio API?

var context = new window.AudioContext();
var request = cc.loader.getXMLHttpRequest();
request.open("GET", 'res/raw-assets/resources/audio/bgm.mp3', true);
request.responseType = "arraybuffer";
request.onload = function () {
    context["decodeAudioData"](request.response, function (buffer) {
        // success
        cc.log('success');
        window.buffer = buffer;
        playBgm();
    }, function () {
        // error
    });
};
request.onerror = function () {
    // error
};
request.send();

function playBgm() {
    var audio = context["createBufferSource"]();
    audio.buffer = buffer;
    var _volume = context['createGain']();
    _volume['gain'].value = 1;
    _volume['connect'](context['destination']);
    audio["connect"](_volume);
    audio.start(0);
}
In my code I load an MP3 file and decode it into an AudioBuffer (window.buffer), then play it successfully.
But it costs a lot of memory, about 100 MB.
How can I release it?
I tried this:
audio.stop();
audio = null;
window.buffer = null;
//context.close()
//context = null
In the Chrome memory view, sometimes the memory is released after about 10 seconds, sometimes after about a minute, and sometimes it seems never to be released.
I want to know whether my code is the right way to release an AudioBuffer.
Do I need to close the AudioContext?
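No answer was posted here, but as a sketch of the usual cleanup approach (an assumption based on how Web Audio garbage collection generally behaves, not a verified fix for this exact app): stop and disconnect the nodes, drop every JavaScript reference to the buffer, and close the context if you are completely done with audio. GC timing is still up to the browser, which would explain the erratic timings observed above.

function releaseBgm() {
    // stop and disconnect so the audio graph no longer holds the buffer
    audio.stop(0);
    audio.disconnect();
    _volume.disconnect();  // assumes audio and _volume were hoisted out of playBgm
    audio = null;
    window.buffer = null;  // drop the last JS reference to the decoded data

    // close() releases the context's resources; only do this if no other
    // sounds will ever be played through it
    context.close().then(function () {
        context = null;
    });
}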

send audio using html5 websockets

My requirement is to send audio chunks captured from the microphone to the server over an HTML5 WebSocket connection.
Establish a WebSocket connection with the server. While the user is speaking, capture small chunks of audio and send them to the server. This should continue until the user stops speaking for 10 seconds; then close the WebSocket connection. Open the WebSocket connection again when the user starts speaking.
I know how to open a connection and send the audio the first time.
I have two buttons. On click of the first (startRecording) I open the WebSocket connection and record the audio.
On click of the second (stopRecording) I send the audio to the server.
But my problem is how to do this without buttons: it has to record until the user pauses, then send the audio to the server.
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia || navigator.msGetUserMedia;

var recorder;
var audio = document.querySelector('audio');
var websocket;
var text;

var onSuccess = function(s) {
    var context = new AudioContext();
    var mediaStreamSource = context.createMediaStreamSource(s);
    recorder = new Recorder(mediaStreamSource); // Recorder.js
    recorder.record();
    // audio loopback
    // mediaStreamSource.connect(context.destination);
};

// minimal fallbacks for the onFail/onError handlers referenced below
var onFail = function(e) {
    console.log('getUserMedia failed:', e);
};
var onError = function(evt) {
    console.log('WebSocket error:', evt);
};

function startRecording() {
    var wsURI = "url";
    websocket = new WebSocket(wsURI);
    websocket.onopen = function(evt) { onOpen(evt) };
    websocket.onclose = function(evt) { onClose() };
    websocket.onmessage = function(evt) { onMessage(evt) };
    websocket.onerror = function(evt) { onError(evt) };

    function onOpen(evt) {
        var message = {
            'action': 'start',
            //'content-type': 'audio/l16;rate=22050'
            'content-type': 'audio/wav;rate=22050'
        };
        websocket.send(JSON.stringify(message));
    }

    function onMessage(evt) {
        console.log(evt.data);
        //console.log(JSON.parse(evt.data))
    }

    // start capturing as soon as the socket is set up
    if (navigator.getUserMedia) {
        navigator.getUserMedia({audio: true}, onSuccess, onFail);
    } else {
        console.log('navigator.getUserMedia not present');
    }
}

function onClose() {
    websocket.close();
}

function stopRecording() {
    recorder.stop();
    recorder.exportWAV(function(s) {
        //audio.src = window.URL.createObjectURL(s);
        websocket.send(s);
        //websocket.onclose();
    });
}
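No answer was posted, but here is a sketch of one way to remove the buttons (my own assumption about the approach, untested against this setup): feed the microphone stream into an AnalyserNode and poll its level; call the question's startRecording() when the level first rises, and after 10 seconds of continuous silence call stopRecording() and close the socket. The threshold and polling interval are arbitrary and would need tuning:

var SILENCE_THRESHOLD = 10;  // 0-255 byte level; tune for your microphone
var SILENCE_MS = 10000;      // stop after 10 s of silence
var silenceStart = null;
var recording = false;

function monitor(context, mediaStreamSource) {
    var analyser = context.createAnalyser();
    analyser.fftSize = 512;
    mediaStreamSource.connect(analyser);
    var data = new Uint8Array(analyser.frequencyBinCount);

    setInterval(function() {
        analyser.getByteFrequencyData(data);
        var level = Math.max.apply(null, data);

        if (level > SILENCE_THRESHOLD) {
            silenceStart = null;
            if (!recording) {       // speech began: open socket and record
                recording = true;
                startRecording();
            }
        } else if (recording) {
            if (silenceStart === null) silenceStart = Date.now();
            if (Date.now() - silenceStart > SILENCE_MS) {
                recording = false;  // 10 s of silence: send and close
                stopRecording();
            }
        }
    }, 100);
}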

Download link for video file

I have a video for which I want to provide a download link. However, when I create a simple download link and click on it (in Firefox & Chrome), it starts playing the video instead of letting me download it. Is there a way, working in all current browsers, to force them to offer the save-as dialog?
Try using the download attribute.
<a href="myvideo.mp4" download>Download</a>
More at:
http://www.w3schools.com/tags/att_a_download.asp
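Note that current browsers only honor the download attribute for same-origin URLs. For a cross-origin file the reliable fix is a server-side Content-Disposition: attachment header, which forces the save-as dialog regardless of the attribute. A sketch of that, assuming an Express server (the route and file path are illustrative):

const express = require('express');
const app = express();

// res.download() sends the file with Content-Disposition: attachment
app.get('/download/myvideo.mp4', (req, res) => {
    res.download(__dirname + '/videos/myvideo.mp4');
});

app.listen(3000);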
You could also try this:
if (isVideo) {
    var div = document.createElement('div');
    div.className = "column";

    var vid = document.createElement('video');
    var source = document.createElement('source');
    source.type = "video/mp4";
    source.src = display_src;
    vid.appendChild(source);
    vid.controls = true;

    var alink = document.createElement('a');
    alink.href = display_src;
    alink.id = 'downlo_click';
    alink.text = "Repost";
    // window.open(alink, '_self');

    div.appendChild(vid);
    div.appendChild(alink);
    document.getElementById('gamediv').appendChild(div);

    document.getElementById('downlo_click').addEventListener('click', function() {
        // fetch the file as a blob, then hand it to the download.js helper
        var x = new XMLHttpRequest();
        x.open("GET", display_src, true);
        x.responseType = 'blob';
        x.onload = function(e) {
            download(x.response, "abcd.mp4", "video/mp4"); // download.js
        };
        x.send();
        // window.open(alink, '_self');
        // download("data:text/html,"display_src, display_src);
    });
}

HTML5 web audio controls

I have the music player example http://www.smartjava.org/examples/webaudio/example3.html
and I need to show an HTML5 audio player (with controls) for this song. How can I do it?
The JavaScript code from the example is below:
// create the audio context (chrome only for now)
if (! window.AudioContext) {
    if (! window.webkitAudioContext) {
        alert('no audiocontext found');
    }
    window.AudioContext = window.webkitAudioContext;
}
var context = new AudioContext();
var audioBuffer;
var sourceNode;
var analyser;
var javascriptNode;

// get the context from the canvas to draw on
var ctx = $("#canvas").get()[0].getContext("2d");

// create a gradient for the fill. Note the strange
// offset, since the gradient is calculated based on
// the canvas, not the specific element we draw
var gradient = ctx.createLinearGradient(0, 0, 0, 300);
gradient.addColorStop(1, '#000000');
gradient.addColorStop(0.75, '#ff0000');
gradient.addColorStop(0.25, '#ffff00');
gradient.addColorStop(0, '#ffffff');

// load the sound
setupAudioNodes();
loadSound("http://www.audiotreasure.com/mp3/Bengali/04_john/04_john_04.mp3");

function setupAudioNodes() {
    // setup a javascript node
    javascriptNode = context.createScriptProcessor(2048, 1, 1);
    // connect to destination, else it isn't called
    javascriptNode.connect(context.destination);

    // setup a analyzer
    analyser = context.createAnalyser();
    analyser.smoothingTimeConstant = 0.3;
    analyser.fftSize = 512;

    // create a buffer source node
    sourceNode = context.createBufferSource();
    sourceNode.connect(analyser);
    analyser.connect(javascriptNode);
    sourceNode.connect(context.destination);
}

// load the specified sound
function loadSound(url) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';

    // When loaded decode the data
    request.onload = function() {
        // decode the data
        context.decodeAudioData(request.response, function(buffer) {
            // when the audio is decoded play the sound
            playSound(buffer);
        }, onError);
    };
    request.send();
}

function playSound(buffer) {
    sourceNode.buffer = buffer;
    sourceNode.start(0);
}

// log if an error occurs
function onError(e) {
    console.log(e);
}

// when the javascript node is called
// we use information from the analyzer node
// to draw the volume
javascriptNode.onaudioprocess = function() {
    // get the average for the first channel
    var array = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(array);

    // clear the current state
    ctx.clearRect(0, 0, 1000, 325);

    // set the fill style
    ctx.fillStyle = gradient;
    drawSpectrum(array);
};

function drawSpectrum(array) {
    for (var i = 0; i < array.length; i++) {
        var value = array[i];
        ctx.fillRect(i * 5, 325 - value, 3, 325);
        // console.log([i, value])
    }
}
I think what you want is to use an audio element for your source and use createMediaElementSource to pass the audio to Web Audio for visualization.
Beware that createMediaElementSource checks for CORS access, so you must have appropriate cross-origin permissions for this to work. (It looks like your audio source doesn't return the appropriate access headers.)
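A sketch of that approach (untested; the element id and src are placeholders, and it assumes the example's context/analyser graph is kept while replacing the XHR/buffer-source loading). The audio element provides the native player controls, and the analyser taps the same signal:

// assumes <audio id="player" controls crossorigin="anonymous" src="song.mp3">
// exists in the page and the server sends the needed CORS headers
var audioElement = document.getElementById('player');
var mediaSource = context.createMediaElementSource(audioElement);

// tap the element's output into the example's existing analyser chain
mediaSource.connect(analyser);

// route the sound onward so it is still audible
mediaSource.connect(context.destination);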

How to get media stream object form HTML5 video element in javascript

Hi all,
I'm doing peer-to-peer communication using WebRTC. We have the MediaStream object from getUserMedia, which is given as the input stream to the peer connection. Here I need a video stream from a video file selected from the local drive, which is playing via the HTML5 video element.
Is it possible to create a MediaStream object from the video tag?
Thanks,
suri
For now you can't get a media stream from a video tag, but it should be possible in the future, as explained on MDN:
MediaStream objects have a single input and a single output. A MediaStream object generated by getUserMedia() is called local, and has as its source input one of the user's cameras or microphones. A non-local MediaStream may be representing a media element, like <video> or <audio>, a stream originating over the network and obtained via the WebRTC PeerConnection API, or a stream created using the Web Audio API MediaStreamAudioSourceNode.
But you can use the Media Source Extensions API to do what you want: you have to put the local file into a stream and append it to a MediaSource object. You can learn more about MSE here: http://www.w3.org/TR/media-source/
And you can find a demo and source of the method above here
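A minimal sketch of that MSE approach (the codec string, the localFile variable, and the file reading are assumptions; the codec string must match the file's actual codecs, and MSE only accepts fragmented MP4):

var video = document.querySelector('video');
var mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function () {
    // codec string is illustrative; it must match the real file
    var sourceBuffer = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.42E01E, mp4a.40.2"');

    // read the locally selected file (e.g. from an <input type="file">)
    var reader = new FileReader();
    reader.onload = function (e) {
        sourceBuffer.addEventListener('updateend', function () {
            mediaSource.endOfStream();
        });
        sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
    };
    reader.readAsArrayBuffer(localFile); // localFile: a File object, assumed
});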
2021 update: it is now possible using the MediaRecorder interface: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder
Example from the same page:
// record, stop, soundClips and visualize() come from the MDN demo page
if (navigator.mediaDevices) {
    console.log('getUserMedia supported.');
    var constraints = { audio: true };
    var chunks = [];

    navigator.mediaDevices.getUserMedia(constraints)
        .then(function(stream) {
            var mediaRecorder = new MediaRecorder(stream);
            visualize(stream);

            record.onclick = function() {
                mediaRecorder.start();
                console.log(mediaRecorder.state);
                console.log("recorder started");
                record.style.background = "red";
                record.style.color = "black";
            }

            stop.onclick = function() {
                mediaRecorder.stop();
                console.log(mediaRecorder.state);
                console.log("recorder stopped");
                record.style.background = "";
                record.style.color = "";
            }

            mediaRecorder.onstop = function(e) {
                console.log("data available after MediaRecorder.stop() called.");
                var clipName = prompt('Enter a name for your sound clip');

                var clipContainer = document.createElement('article');
                var clipLabel = document.createElement('p');
                var audio = document.createElement('audio');
                var deleteButton = document.createElement('button');

                clipContainer.classList.add('clip');
                audio.setAttribute('controls', '');
                deleteButton.innerHTML = "Delete";
                clipLabel.innerHTML = clipName;

                clipContainer.appendChild(audio);
                clipContainer.appendChild(clipLabel);
                clipContainer.appendChild(deleteButton);
                soundClips.appendChild(clipContainer);

                audio.controls = true;
                var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
                chunks = [];
                var audioURL = URL.createObjectURL(blob);
                audio.src = audioURL;
                console.log("recorder stopped");

                deleteButton.onclick = function(e) {
                    var evtTgt = e.target;
                    evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
                }
            }

            mediaRecorder.ondataavailable = function(e) {
                chunks.push(e.data);
            }
        })
        .catch(function(err) {
            console.log('The following error occurred: ' + err);
        });
}
MDN also has a detailed mini tutorial: https://developer.mozilla.org/en-US/docs/Web/API/MediaStream_Recording_API/Recording_a_media_element
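And to answer the original question directly: modern browsers also expose HTMLMediaElement.captureStream(), which returns a live MediaStream from a playing video element that can be fed straight into a peer connection. A sketch (mozCaptureStream is the Firefox-prefixed name; peerConnection is an existing RTCPeerConnection, assumed):

var video = document.querySelector('video');

// captureStream() returns a live MediaStream of the element's playback
var stream = video.captureStream ? video.captureStream()
                                 : video.mozCaptureStream(); // Firefox prefix

// the stream's tracks can be added to a peer connection like any others
stream.getTracks().forEach(function (track) {
    peerConnection.addTrack(track, stream);
});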