I wrote test code for recording audio in a webpage. The exported WAV file can be played, but the result is totally wrong: just a few meaningless sounds, and the duration shown in VLC is not right. Please have a look and tell me whether there is anything wrong in the code.
<html>
<head></head>
<body>
<script src="https://code.jquery.com/jquery-3.1.0.min.js"></script>
<button id='start'>Start</button>
<button id='stop'>Stop</button>
<button id='export'>Export</button>
<script>
var isRecording = false;
var buf = [];
var totalLen = 0;
var channelNum = 1;
var sampleRate = 48000;
$('#start').click(function()
{
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
window.URL = window.URL || window.webkitURL;
var cxt = new AudioContext();
isRecording = true;
navigator.getUserMedia({audio:true}, function(stream){
var streamNode = cxt.createMediaStreamSource(stream);
var processNode = (cxt.createScriptProcessor || cxt.createJavaScriptNode).call(cxt, 1024 * 2, channelNum, channelNum);
processNode.onaudioprocess = function(e){
if(!isRecording) return;
var f = e.inputBuffer;
var cdata = f.getChannelData(channelNum -1);
buf.push(cdata);
totalLen += cdata.length;
};
streamNode.connect(processNode);
processNode.connect(cxt.destination);
}, function(){});
});
$('#stop').click(function(){
isRecording = false;
});
$('#export').click(function(){
var oneBuf = new Float32Array(totalLen);
var offset = 0;
for(var i=0;i<buf.length;++i){
oneBuf.set(buf[i], offset);
offset += buf[i].length;
}
var encodedBuf = encodeWAV(oneBuf);
var blob = new Blob([encodedBuf], {type:'audio/wav'});
console.log(encodedBuf);
var url = URL.createObjectURL(blob);
var hf = document.createElement('a');
hf.download = '1.wav';
hf.innerHTML = hf.download;
hf.href = url;
$('body').append(hf);
});
function encodeWAV(samples){
var arr = new ArrayBuffer(44 + samples.length * 2);
var view = new DataView(arr);
/* RIFF identifier */
writeString(view, 0, 'RIFF');
/* RIFF chunk length */
view.setUint32(4, 36 + samples.length * 2, true);
/* RIFF type */
writeString(view, 8, 'WAVE');
/* format chunk identifier */
writeString(view, 12, 'fmt ');
/* format chunk length */
view.setUint32(16, 16, true);
/* sample format (raw) */
view.setUint16(20, 1, true);
/* channel count */
view.setUint16(22, channelNum, true);
/* sample rate */
view.setUint32(24, sampleRate, true);
/* byte rate (sample rate * block align) */
view.setUint32(28, sampleRate * 4, true);
/* block align (channel count * bytes per sample) */
view.setUint16(32, channelNum * 2, true);
/* bits per sample */
view.setUint16(34, 16, true);
/* data chunk identifier */
writeString(view, 36, 'data');
/* data chunk length */
view.setUint32(40, samples.length * 2, true);
floatTo16BitPCM(view, 44, samples);
console.log('my samplerate:%d', sampleRate);
return view;
}
function floatTo16BitPCM(view, offset, input) {
for (var i = 0; i < input.length; i++, offset += 2) {
var s = Math.max(-1, Math.min(1, input[i]));
view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
function writeString(view, offset, string) {
for (var i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
</script>
</body>
</html>
After doing some investigation, I found the problem.
var cdata = f.getChannelData(channelNum -1);
buf.push(cdata);
The variable buf stores sound data from the audio buffer, but it only stores a reference to that buffer. The underlying buffer is continuously overwritten by the low-level code. As a consequence, buf ends up holding an array of references to one and the same buffer. That's why the sound played by VLC is meaningless.
After changing the code to the following, it works fine:
var cdata = f.getChannelData(channelNum -1);
buf.push(cdata.slice());
But I still don't know why the duration value is incorrect in VLC.
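A hedged guess at the remaining duration problem (not verified against this exact setup): the WAV header does not match the actual data. The code hardcodes sampleRate = 48000, but the AudioContext may actually be running at a different rate (check cxt.sampleRate), and the byte-rate field is written as sampleRate * 4 even though mono 16-bit PCM has a block align of only 2 bytes. Players such as VLC estimate PCM duration from the byte-rate field, so an inflated value makes the file look shorter than it really is. A minimal sketch of the corrected pieces, assuming the rest of the code stays the same:
// in the getUserMedia success callback, capture the context's real rate:
sampleRate = cxt.sampleRate; // instead of the hardcoded 48000
// and in encodeWAV, write a byte rate consistent with the block align:
/* byte rate (sample rate * block align) */
view.setUint32(28, sampleRate * channelNum * 2, true); // was sampleRate * 4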
Related
I'm trying to get a spectroscope (a sine-wave trace) to show while an audio file is playing. The audio file plays fine, but the graph only shows a flat line:
function play() {
var audio = document.querySelector('audio');
audio.load();
audio.play();
var context = new (window.AudioContext || window.webkitAudioContext)();
var source = context.createMediaElementSource(audio);
var analyser = context.createAnalyser();
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.smoothingTimeConstant = 0.85;
source.connect(context.destination);
var canvas = document.querySelector('canvas');
var canvasCtx = canvas.getContext('2d');
var drawVisual;
var WIDTH = canvas.width;
var HEIGHT = canvas.height;
analyser.fftSize = 2048;
var bufferLength = analyser.fftSize;
var dataArray = new Uint8Array(bufferLength);
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
var draw = function() {
drawVisual = requestAnimationFrame(draw);
analyser.getByteTimeDomainData(dataArray);
canvasCtx.fillStyle = 'rgb(255, 255, 255)';
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = 'rgb(255, 0, 255)';
canvasCtx.beginPath();
var sliceWidth = WIDTH / bufferLength;
var x = 0;
for(var i = 0; i < bufferLength; i++) {
var v = dataArray[i] / 128.0;
var y = v * HEIGHT / 2;
if(i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
};
draw();
}
<audio loop src="data:audio/ogg;base64,T2dnUwACAAAAAAAAAABiZQAAAAAAAFvMjyIBHgF2b3JiaXMAAAAAAUSsAAAAAAAAgLsAAAAAAAC4AU9nZ1MAAAAAAAAAAAAAYmUAAAEAAACaF1aWDzv/////////////////MgN2b3JiaXMrAAAAWGlwaC5PcmcgbGliVm9yYmlzIEkgMjAxMjAyMDMgKE9tbmlwcmVzZW50KQAAAAABBXZvcmJpcx9CQ1YBAAABABhjVClGmVLSSokZc5QxRplikkqJpYQWQkidcxRTqTnXnGusubUghBAaU1ApBZlSjlJpGWOQKQWZUhBLSSV0EjonnWMQW0nB1phri0G2HIQNmlJMKcSUUopCCBlTjCnFlFJKQgcldA465hxTjkooQbicc6u1lpZji6l0kkrnJGRMQkgphZJKB6VTTkJINZbWUikdc1JSakHoIIQQQrYghA2C0JBVAAABAMBAEBqyCgBQAAAQiqEYigKEhqwCADIAAASgKI7iKI4jOZJjSRYQGrIKAAACABAAAMBwFEmRFMmxJEvSLEvTRFFVfdU2VVX2dV3XdV3XdSA0ZBUAAAEAQEinmaUaIMIMZBgIDVkFACAAAABGKMIQA0JDVgEAAAEAAGIoOYgmtOZ8c46DZjloKsXmdHAi1eZJbirm5pxzzjknm3PGOOecc4pyZjFoJrTmnHMSg2YpaCa05pxznsTmQWuqtOacc8Y5p4NxRhjnnHOatOZBajbW5pxzFrSmOWouxeaccyLl5kltLtXmnHPOOeecc84555xzqhenc3BOOOecc6L25lpuQhfnnHM+Gad7c0I455xzzjnnnHPOOeecc4LQkFUAABAAAEEYNoZxpyBIn6OBGEWIacikB92jwyRoDHIKqUejo5FS6iCUVMZJKZ0gNGQVAAAIAAAhhBRSSCGFFFJIIYUUUoghhhhiyCmnnIIKKqmkoooyyiyzzDLLLLPMMuuws8467DDEEEMMrbQSS0211VhjrbnnnGsO0lpprbXWSimllFJKKQgNWQUAgAAAEAgZZJBBRiGFFFKIIaaccsopqKACQkNWAQCAAAACAAAAPMlzREd0REd0REd0REd0RMdzPEeUREmUREm0TMvUTE8VVdWVXVvWZd32bWEXdt33dd/3dePXhWFZlmVZlmVZlmVZlmVZlmVZgtCQVQAACAAAgBBCCCGFFFJIIaUYY8wx56CTUEIgNGQVAAAIACAAAADAURzFcSRHciTJkixJkzRLszzN0zxN9ERRFE3TVEVXdEXdtEXZlE3XdE3ZdFVZtV1Ztm3Z1m1flm3f933f933f933f933f93UdCA1ZBQBIAADoSI6kSIqkSI7jOJIkAaEhqwAAGQAAAQAoiqM4juNIkiRJlqRJnuVZomZqpmd6qqgCoSGrAABAAAABAAAAAAAomuIppuIpouI5oiNKomVaoqZqriibsuu6ruu6ruu6ruu6ruu6ruu6ruu6ruu6ruu6ruu6ruu6ruu6LhAasgoAkAAA0JEcyZEcSZEUSZEcyQFCQ1YBADIAAAIAcAzHkBTJsSxL0zzN0zxN9ERP9ExPFV3RBUJDVgEAgAAAAgAAAAAAMCTDUixHczRJlFRLtVRNtVRLFVVPVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVNU3TNE0gNGQlAAAEAMBijcHlICElJeXeEMIQk54xJiG1XiEEkZLeMQYVg54yogxy3kLjEIMeCA1ZEQBEAQAAxiDHEHPIOUepkxI556h0lBrnHKWOUmcpxZhizSiV2FKsjXOOUketo5RiLC12lFKNqcYCAAACHAAAAiyEQkNWBABRAACEMUgppBRijDmnnEOMKeeYc4Yx5hxzjjnnoHRSKuecdE5KxBhzjjmnnHNSOieVc05KJ6EAAIAABwCAAAuh0JAVAUCcAIBBkjxP8jRRlDRPFEVTdF1RNF3X8jzV9ExTVT3RVFVTVW3ZVFVZljzPND3TVFXPNFXVVFVZNlVVlkVV1W3TdXXbdFXdlm3b911bFnZRVW3dVF3bN1XX9l3Z9n1Z1nVj8jxV9UzTdT3TdGXVdW1bdV1d90xTlk3XlWXTdW3blWVdd2XZ9zXTdF3TVWXZdF3ZdmVXt11Z9n3TdYXflWVfV2VZGHZd94Vb15XldF3dV2VXN1ZZ9n1b14Xh1nVhmTxPVT3TdF3PNF1XdV1fV13X1jXTlGXTdW3ZVF1ZdmXZ911X1nXPNGXZdF3bNl1Xll1Z9n1XlnXddF1fV2VZ+FVX9nVZ15Xh1m3hN13X91VZ9oVXlnXh1nVhuXVdGD5V9X1TdoXhdGXf14XfWW5dOJbRdX1hlW3hWGVZOX7hWJbd95VldF1fWG3ZGFZZFoZf+J3l9n3jeHVdGW7d58y67wzH76T7ytPVbWOZfd1ZZl93juEYOr/w46mqr5uuKwynLAu/7evGs/u+soyu6/uqLAu/KtvCseu+8/y+sCyj7PrCasvCsNq2Mdy+biy/cBzLa+vKMeu+UbZ1fF94CsPzdHVdeWZdx/Z1dONHOH7KAACAAQcAgAATykChISsCgDgBAI8kiaJkWaIoWZYoiqbouqJouq6kaaapaZ5pWppnmqZpqrIpmq4saZppWp5mmpqnmaZomq5rmqasiqYpy6ZqyrJpmrLsurJtu65s26JpyrJpmrJsmqYsu7Kr267s6rqkWaapeZ5pap5nmqZqyrJpmq6reZ5qep5oqp4oqqpqqqqtqqosW55nmproqaYniqpqqqatmqoqy6aq2rJpqrZsqqptu6rs+rJt67ppqrJtqqYtm6pq267s6rIs27ovaZppap5nmprnmaZpmrJsmqorW56nmp4oqqrmiaZqqqosm6aqypbnmaoniqrqiZ5rmqoqy6Zq2qppmrZsqqotm6Yqy65t+77ryrJuqqpsm6pq66ZqyrJsy77vyqruiqYpy6aq2rJpqrIt27Lvy7Ks+6JpyrJpqrJtqqouy7JtG7Ns+7pomrJtqqYtm6oq27It+7os27rvyq5vq6qs67It+7ru+q5w67owvLJs+6qs+ror27pv6zLb9n1E05RlUzVt21RVWXZl2fZl2/Z90TRtW1VVWzZN1bZlWfZ9WbZtYTRN2TZVVdZN1bRtWZZtYbZl4XZl2bdlW/Z115V1X9d949dl3ea6su3Lsq37qqv6tu77wnDrrvAKAAAYcAAACDChDBQashIAiAIAAIxhjDEIjVLOOQehUco55yBkzkEIIZXMOQghlJI5B6GUlDLnIJSSUgihlJRaCyGUlFJrBQAAFDgAAATYoCmxOEChISsBgFQAAIPjWJbnmaJq2rJjSZ4niqqpqrbtSJbniaJpqqptW54niqapqq7r65rniaJpqqrr6rpomqapqq7ruroumqKpqqrrurKum6aqqq4ru7Ls66aqqqrryq4s+8Kquq4ry7Jt68Kwqq7ryrJs27Zv3Lqu677v+8KRreu6LvzCMQxHAQDgCQ4AQAU2rI5wUjQWWGjISgAgAwCAMAYhgxBCBiGEkFJK
IaWUEgAAMOAAABBgQhkoNGRFABAnAAAYQymklFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSCmllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSqmklFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimVUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFJKKaWUUkoppZRSSimllFIKAJCKcACQejChDBQashIASAUAAIxRSinGnIMQMeYYY9BJKClizDnGHJSSUuUchBBSaS23yjkIIaTUUm2Zc1JaizHmGDPnpKQUW805h1JSi7HmmmvupLRWa64151paqzXXnHPNubQWa64515xzyzHXnHPOOecYc84555xzzgUA4DQ4AIAe2LA6wknRWGChISsBgFQAAAIZpRhzzjnoEFKMOecchBAihRhzzjkIIVSMOeccdBBCqBhzzDkIIYSQOecchBBCCCFzDjroIIQQQgcdhBBCCKGUzkEIIYQQSighhBBCCCGEEDoIIYQQQgghhBBCCCGEUkoIIYQQQgmhlFAAAGCBAwBAgA2rI5wUjQUWGrISAAACAIAclqBSzoRBjkGPDUHKUTMNQkw50ZliTmozFVOQORCddBIZakHZXjILAACAIAAgwAQQGCAo+EIIiDEAAEGIzBAJhVWwwKAMGhzmAcADRIREAJCYoEi7uIAuA1zQxV0HQghCEIJYHEABCTg44YYn3vCEG5ygU1TqIAAAAAAADADgAQDgoAAiIpqrsLjAyNDY4OjwCAAAAAAAFgD4AAA4PoCIiOYqLC4wMjQ2ODo8AgAAAAAAAAAAgICAAAAAAABAAAAAgIBPZ2dTAAQiVgAAAAAAAGJlAAACAAAAPuePCyUVNCgpJykqMhMUFBQfHwEBAQEBAQEBAQEBAQEdEzctLSorKy05DN3v80/tzuTbDAAIjkoywUfOEcIE+hpP+uN/gfj5tZru2igfgIQDAAAAAAAAQAEAoAc3r//fwfNvv/rq+Zf287YCJEmyCLVtAF4r78vbHxDkgw/cXzRKwAMAfv4QAL4OAAAAAAAAAAAAAJ7KAACUfwBeK+/L2x8Q5IMP3F80SoEHAPz8oQC8agAAAAAAAAAAAAB+NgsAUPcKAF4r78vbH1DkAyvuzxplAiQcAAAAAAAAAGoAVgAA8L/mpgAA8WWjAF4r7+PbPwDi/PbA/a1EKfAAgH9/EYCtAAAAAAAAAAAAAFCvUQAAPBgAXivvy9sfIPLBF+7PGiXgJACAbwAAAAAAAAAAAACAmmMAFHhiAACgMpUAFhtv8kvCS8N+Pj8wNzFQwkPX148e1XWNnjoAHQAAAAAAAAAIn45Uqd4jhK0agAfgDgFM7+99+cuBAAAA4BkbeKnpdxEAVO/v4xZaAwAAAJwRcO718GsOSABM7+99+VsDAAAA+lPC93HqWwI0AETv7/NrWgMAAAD6DwF3X5ddAkgAbGHK5YvHI8F64jVDQ8WPV23Gj1dtTo+/76dH77FDAaRMdby4jQQ2sj3eJY6lXhSNpV6sjN1drMzdlStuhwAKDg4OBgAAAAAAAAAALNkvru5u3yUwvf56EPWqzemN99h41eb0xntsPANM6+/jO5Ly/rGyAQAAQJOIESgAOhuP9CXJluV/OJCf5ewtPDjPT58+PSfsawCADigAAAAAAADak7cmS87/Oz6JMcYYc4wKUXsAAF4rj+vbH6DbB3457i+aCE4CAPgGAAoAAAAAAAAAAKDmGAD0eGIAAACqC4MBAF4rj+vbH6DbB3457i+aCE4CALgGgAIAAAAAAAAAAPCzAgBnfDUAAADV1+EBAF4rj8v7f93N+f2X4/46oznBAwD+9h0A/C0AAAAAAAAAAAAAdwEAAFj1AF4r9/uHP4CwD3y63Z81mgtOAgB4AAAAAAAAAAAAAAAYTQBwemwKAACMxwNeK4/r2x+A2wd+Oe4vGs0DJwEA/A4AAAAAAAAAAAAAlJ0AgNMwIwAAMK46XiuP41vCSzfnt98dz28lugA8AODfHw4Afk4AAAAAAAAAoAjVA1ToBgAAWP4BPro26EcSKvkNW8cpZ2/hoTcGkM/TAQAjoVAAFAoAAOHdL++QeFPqn83NVQAoN6EBEgCPADSACYAC"></audio>
<br>
<canvas width="640" height="100"></canvas>
<br>
<button onclick="play()">Play</button>
Note the audio source is hardcoded here to avoid CORS issues; normally audio.src would be set immediately after the audio variable is initialized, e.g.
var audio = document.querySelector('audio');
audio.src = 'https://www.example.com/foo.mp3';
The AnalyserNode is not connected yet. It needs to receive the signal that you want to analyze as its input.
You can achieve that by changing the following line ...
source.connect(context.destination);
... into ...
source.connect(analyser).connect(context.destination);
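Note that this chaining works because connect() returns the node it was connected to in the current Web Audio API spec; if you need to support older implementations where connect() does not return the destination node, the equivalent unchained form is:
source.connect(analyser);
analyser.connect(context.destination);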
I am testing FPS on my laptop, which uses the Intel(R) Iris(R) Plus Graphics 655 card, to compare the three.js example's instanced rendering with merged-draw-call rendering.
I used both the QRCode_buffergeometry.json model and the suzanne_buffergeometry.json model:
for QRCode_buffergeometry.json: vertices: 12852, faces: 4284
for suzanne_buffergeometry.json: vertices: 1515, faces: 967
Then the FPS for the suzanne_buffergeometry with 8000 count:
INSTANCE: 36
MERGED: 43
NATIVE: 23 to 35, varying with rotation
for the QRCode_buffergeometry model with 8000 count:
INSTANCE: 9
MERGED: 15-17
NATIVE: 17-19
I am very confused by this performance.
1. As far as I understand, no matter whether I use instancing or merged draw calls, the draw-call count is fixed at 1 and the total number of faces to draw is the same, so why are merged draw calls faster than instancing? Since the face and vertex counts are the same, I suppose the vertex-shader work to transform the vertices should also be the same, so why is merged faster?
2. For the QRCode_buffergeometry model, native is almost the same as merged and better than instanced, so I guess the CPU is not the bottleneck but the GPU is. However, the final drawing data should be the same; the number of faces drawn is eventually the same, so why is native faster? Isn't instancing supposed to be the best approach? I am pretty sure the camera's near and far planes are big enough, so there should not be any culling issue.
3. When I am trying to optimize a big scene, when should I pick merging, when instancing, and when is it better to do nothing at all?
Any help?
Thanks a lot~~~
The code for the sample is attached here:
body { margin: 0; }
<div id="container"></div>
<script type="module">
import * as THREE from 'https://cdn.jsdelivr.net/npm/three@0.112.1/build/three.module.js';
import Stats from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/libs/stats.module.js';
import {
GUI
} from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/libs/dat.gui.module.js';
import {
OrbitControls
} from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/controls/OrbitControls.js';
import {
BufferGeometryUtils
} from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/utils/BufferGeometryUtils.js';
var container, stats, gui, guiStatsEl;
var camera, controls, scene, renderer, material;
// gui
var Method = {
INSTANCED: 'INSTANCED',
MERGED: 'MERGED',
NAIVE: 'NAIVE'
};
var api = {
method: Method.INSTANCED,
mesh_number: 1,
count_per_mesh: 1000
};
var modelName = 'suzanne_buffergeometry.json';
var modelScale = (modelName === 'suzanne_buffergeometry.json' ? 1 : 0.01);
var modelVertex = (modelName === 'suzanne_buffergeometry.json' ? 1515 : 12852);
var modelFace = (modelName === 'suzanne_buffergeometry.json' ? 967 : 4284);
//
init();
initMesh();
animate();
//
function clean() {
var meshes = [];
scene.traverse(function(object) {
if (object.isMesh) meshes.push(object);
});
for (var i = 0; i < meshes.length; i++) {
var mesh = meshes[i];
mesh.material.dispose();
mesh.geometry.dispose();
scene.remove(mesh);
}
}
var randomizeMatrix = function() {
var position = new THREE.Vector3();
var rotation = new THREE.Euler();
var quaternion = new THREE.Quaternion();
var scale = new THREE.Vector3();
return function(matrix) {
position.x = Math.random() * 40 - 20;
position.y = Math.random() * 40 - 20;
position.z = Math.random() * 40 - 20;
rotation.x = Math.random() * 2 * Math.PI;
rotation.y = Math.random() * 2 * Math.PI;
rotation.z = Math.random() * 2 * Math.PI;
quaternion.setFromEuler(rotation);
scale.x = scale.y = scale.z = Math.random() * modelScale;
matrix.compose(position, quaternion, scale);
};
}();
function initMesh() {
clean();
console.time(api.method + ' (build)');
for (var i = 0; i < api.mesh_number; i++) {
// make instances
new THREE.BufferGeometryLoader()
.setPath('https://threejs.org/examples/models/json/')
.load(modelName, function(geometry) {
material = new THREE.MeshNormalMaterial();
geometry.computeVertexNormals();
switch (api.method) {
case Method.INSTANCED:
makeInstanced(geometry);
break;
case Method.MERGED:
makeMerged(geometry);
break;
case Method.NAIVE:
makeNaive(geometry);
break;
}
});
}
console.timeEnd(api.method + ' (build)');
var drawCalls = 0;
switch (api.method) {
case Method.INSTANCED:
case Method.MERGED:
drawCalls = api.mesh_number;
break;
case Method.NAIVE:
drawCalls = api.mesh_number * api.count_per_mesh;
break;
}
guiStatsEl.innerHTML = [
'<i>GPU draw calls</i>: ' + drawCalls,
'<i>Face Number</i>: ' + (modelFace * api.mesh_number * api.count_per_mesh),
'<i>Vertex Number</i>: ' + (modelVertex * api.mesh_number * api.count_per_mesh)
].join('<br/>');
}
function makeInstanced(geometry, idx) {
var matrix = new THREE.Matrix4();
var mesh = new THREE.InstancedMesh(geometry, material, api.count_per_mesh);
for (var i = 0; i < api.count_per_mesh; i++) {
randomizeMatrix(matrix);
mesh.setMatrixAt(i, matrix);
}
scene.add(mesh);
}
function makeMerged(geometry, idx) {
var geometries = [];
var matrix = new THREE.Matrix4();
for (var i = 0; i < api.count_per_mesh; i++) {
randomizeMatrix(matrix);
var instanceGeometry = geometry.clone();
instanceGeometry.applyMatrix(matrix);
geometries.push(instanceGeometry);
}
var mergedGeometry = BufferGeometryUtils.mergeBufferGeometries(geometries);
scene.add(new THREE.Mesh(mergedGeometry, material));
}
function makeNaive(geometry, idx) {
var matrix = new THREE.Matrix4();
for (var i = 0; i < api.count_per_mesh; i++) {
randomizeMatrix(matrix);
var mesh = new THREE.Mesh(geometry, material);
mesh.applyMatrix(matrix);
scene.add(mesh);
}
}
function init() {
var width = window.innerWidth;
var height = window.innerHeight;
// camera
camera = new THREE.PerspectiveCamera(70, width / height, 1, 100);
camera.position.z = 30;
// renderer
renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(width, height);
renderer.outputEncoding = THREE.sRGBEncoding;
container = document.getElementById('container');
container.appendChild(renderer.domElement);
// scene
scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
// controls
controls = new OrbitControls(camera, renderer.domElement);
controls.autoRotate = true;
// stats
stats = new Stats();
container.appendChild(stats.dom);
// gui
gui = new GUI();
gui.add(api, 'method', Method).onChange(initMesh);
gui.add(api, 'count_per_mesh', 1, 20000).step(1).onChange(initMesh);
gui.add(api, 'mesh_number', 1, 200).step(1).onChange(initMesh);
var perfFolder = gui.addFolder('Performance');
guiStatsEl = document.createElement('li');
guiStatsEl.classList.add('gui-stats');
perfFolder.__ul.appendChild(guiStatsEl);
perfFolder.open();
// listeners
window.addEventListener('resize', onWindowResize, false);
Object.assign(window, {
scene
});
}
//
function onWindowResize() {
var width = window.innerWidth;
var height = window.innerHeight;
camera.aspect = width / height;
camera.updateProjectionMatrix();
renderer.setSize(width, height);
}
function animate() {
requestAnimationFrame(animate);
controls.update();
stats.update();
render();
}
function render() {
renderer.render(scene, camera);
}
//
function getGeometryByteLength(geometry) {
var total = 0;
if (geometry.index) total += geometry.index.array.byteLength;
for (var name in geometry.attributes) {
total += geometry.attributes[name].array.byteLength;
}
return total;
}
// Source: https://stackoverflow.com/a/18650828/1314762
function formatBytes(bytes, decimals) {
if (bytes === 0) return '0 bytes';
var k = 1024;
var dm = decimals < 0 ? 0 : decimals;
var sizes = ['bytes', 'KB', 'MB'];
var i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
}
</script>
These are only guesses.
Three.js by default culls objects that are outside the frustum.
We can turn this off with mesh.frustumCulled = false. I didn't notice a difference, and culling would show up in the draw count anyway.
Three.js by default sorts opaque objects front to back.
This means that, everything else being equal, the sorted case will run faster
than the unsorted one because of the depth test. If I set the depth test
to always pass with
material.depthFunc = THREE.AlwaysDepth
then I seem to get slightly faster rendering with instanced vs native. Of course
everything else is not equal.
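In the sample above, that tweak would just mean adjusting the shared material after it is created in initMesh (a sketch, not part of the original measurements):
material = new THREE.MeshNormalMaterial();
material.depthFunc = THREE.AlwaysDepth; // depth test always passes, so draw order no longer matters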
An issue in Chrome.
If I run in Firefox or Safari I get the expected results: Merged > Instanced > Native.
It could be a bug, or it could be that Chrome is working around a driver or
security issue that the other browsers are not. You'd have to ask.
I need to show a spectrum analyzer while music is playing.
Right now, the spectrum drawing is separate from the audio player.
Once the file is ready (after loading with XHR) and the song is playing, the spectrum is drawn synchronously.
var audioBuffer;
var sourceNode;
var analyser;
var javascriptNode;
var actx = new(AudioContext || webkitAudioContext)(), tid,
url = "https://cdn.rawgit.com/epistemex/free-music-for-test-and-demo/master/music/kf_colibris.mp3";
var ctx = $("#canvas").get()[0].getContext("2d");
var gradient = ctx.createLinearGradient(0,0,0,300);
gradient.addColorStop(1,'#000000');
gradient.addColorStop(0.75,'#ff0000');
gradient.addColorStop(0.25,'#ffff00');
gradient.addColorStop(0,'#ffffff');
setupAudioNodes();
function setupAudioNodes() {
// setup a javascript node
javascriptNode = actx.createScriptProcessor(2048, 1, 1);
// connect to destination, else it isn't called
javascriptNode.connect(actx.destination);
// setup a analyzer
analyser = actx.createAnalyser();
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 512;
// create a buffer source node
sourceNode = actx.createBufferSource();
sourceNode.connect(analyser);
analyser.connect(javascriptNode);
sourceNode.connect(actx.destination);
}
// play the decoded buffer
function playSound(buffer) {
sourceNode.buffer = buffer;
sourceNode.start(0);
}
// log if an error occurs
function onError(e) {
console.log(e);
}
// when the javascript node is called
// we use information from the analyzer node
// to draw the volume
javascriptNode.onaudioprocess = function() {
// get the frequency data for the first channel
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
// clear the current state
ctx.clearRect(0, 0, 1000, 325);
// set the fill style
ctx.fillStyle=gradient;
drawSpectrum(array);
}
function drawSpectrum(array) {
for ( var i = 0; i < (array.length); i++ ){
var value = array[i];
ctx.fillRect(i*5,325-value,3,325);
// console.log([i,value])
}
};
// old draw --------------------------------------------------------
// STEP 1: Load audio file using AJAX ----------------------------------
loadXHR(url, decode);
tid = setInterval(function() {document.querySelector("div").innerHTML += "."}, 500);
function loadXHR(url, callback) {
try {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "arraybuffer";
xhr.onerror = function() {console.log("Network error.")};
xhr.onload = function() {
if (xhr.status === 200) callback(xhr.response);
else console.log("Loading error:" + xhr.statusText);
};
xhr.send();
} catch (err) {console.log(err.message)}
}
// STEP 2: Decode the audio file ---------------------------------------
function decode(buffer) {
clearInterval(tid);
document.querySelector("div").innerHTML = "Decoding file...";
actx.decodeAudioData(buffer, split);
}
// STEP 3: Split the buffer --------------------------------------------
function split(abuffer) {
document.querySelector("div").innerHTML = "Splitting...";
setTimeout(function() { // to allow DOM to update status-text
// calc number of segments and segment length
var channels = abuffer.numberOfChannels,
duration = abuffer.duration,
rate = abuffer.sampleRate,
segmentLen = 10,
count = Math.floor(duration / segmentLen),
offset = 0,
// block = 10 * rate;
block = abuffer.length;
// while(count--) {
var url = URL.createObjectURL(bufferToWave(abuffer, offset, block));
var audio = new Audio(url);
audio.controls = true;
audio.volume = 0.5;
audio.autoplay = true;
document.body.appendChild(audio);
//offset += block;
// }
document.querySelector("div").innerHTML = "Ready!";
}, 60)
playSound(abuffer);
}
// Convert a audio-buffer segment to a Blob using WAVE representation
function bufferToWave(abuffer, offset, len) {
var numOfChan = abuffer.numberOfChannels,
length = len * numOfChan * 2 + 44,
buffer = new ArrayBuffer(length),
view = new DataView(buffer),
channels = [], i, sample,
pos = 0;
// write WAVE header
setUint32(0x46464952); // "RIFF"
setUint32(length - 8); // file length - 8
setUint32(0x45564157); // "WAVE"
setUint32(0x20746d66); // "fmt " chunk
setUint32(16); // length = 16
setUint16(1); // PCM (uncompressed)
setUint16(numOfChan);
setUint32(abuffer.sampleRate);
setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
setUint16(numOfChan * 2); // block-align
setUint16(16); // 16-bit (hardcoded in this demo)
setUint32(0x61746164); // "data" - chunk
setUint32(length - pos - 4); // chunk length
// write interleaved data
for(i = 0; i < abuffer.numberOfChannels; i++)
channels.push(abuffer.getChannelData(i));
while(pos < length) {
for(i = 0; i < numOfChan; i++) { // interleave channels
sample = Math.max(-1, Math.min(1, channels[i][offset])); // clamp
sample = ((sample < 0 ? sample * 32768 : sample * 32767) + 0.5)|0; // scale to 16-bit signed int
view.setInt16(pos, sample, true); // update data chunk
pos += 2;
}
offset++; // next source sample
}
// create Blob
return new Blob([buffer], {type: "audio/wav"});
function setUint16(data) {
view.setUint16(pos, data, true);
pos += 2;
}
function setUint32(data) {
view.setUint32(pos, data, true);
pos += 4;
}
}
<canvas id="canvas" width="1000" height="325" style="display: block;"></canvas>
<div>Loading.</div>
Do NOT use a ScriptProcessorNode's onaudioprocess to do visual updates - skip the ScriptProcessor altogether, and use requestAnimationFrame to call getByteFrequencyData and do visual updates.
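A minimal sketch of that approach, reusing the names from the code above (analyser, ctx, gradient, drawSpectrum):
var freqData = new Uint8Array(analyser.frequencyBinCount);
function drawLoop() {
requestAnimationFrame(drawLoop); // repaint at display rate instead of audio-callback rate
analyser.getByteFrequencyData(freqData);
ctx.clearRect(0, 0, 1000, 325);
ctx.fillStyle = gradient;
drawSpectrum(freqData);
}
drawLoop();
The ScriptProcessorNode (javascriptNode) and its onaudioprocess handler can then be removed entirely.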
I want to build an animated alphabet made up of particles. Basically, the particles transform from one letter shape to another.
My idea is to fill the letters as text on a canvas very briefly (for about one frame), get the pixel data, and move the particles to the correct locations on a setInterval. I have this code for scanning the screen right now:
var ctx = canvas.getContext('2d'),
width = ctx.canvas.width,
height = ctx.canvas.height,
particles = [],
gridX = 8,
gridY = 8;
function Particle(x, y) {
this.x = x;
this.y = y;
}
// fill some text
ctx.font = 'bold 80px sans-serif';
ctx.fillStyle = '#ff0';
ctx.fillText("STACKOVERFLOW", 5, 120);
// now parse bitmap based on grid
var idata = ctx.getImageData(0, 0, width, height);
// use a 32-bit buffer as we are only checking if a pixel is set or not
var buffer32 = new Uint32Array(idata.data.buffer);
// using two loops here, single loop with index-to-x/y is also an option
for(var y = 0; y < height; y += gridY) {
for(var x = 0; x < width; x += gridX) {
//buffer32[] will have a value > 0 (true) if set, if not 0=false
if (buffer32[y * width + x]) {
particles.push(new Particle(x, y));
}
}
}
// render particles
ctx.clearRect(0, 0, width, height);
particles.forEach(function(p) {
ctx.fillRect(p.x - 2, p.y - 2, 4, 4); // just squares here
})
But this way I am only showing one word, without any changes over time. Also, I want to set up about 200 particles initially and reorganize them based on the pixel data, not create new ones on each scan. How would you rewrite the code so that every 1500ms I can pass a different letter and render it with particles?
Hopefully the different parts of this code are clear enough: there are particles that can draw and update themselves, fillParticle spawns particles out of a text string, and spawnChars renders a new part of the text on a regular basis.
It works quite well; play with the parameters if you wish, they are all at the start of the fiddle.
You might want to make this code cleaner by avoiding globals and creating classes.
http://jsbin.com/jecarupiri/1/edit?js,output
// --------------------
// parameters
var text = 'STACKOVERFLOW';
var fontHeight = 80;
var gridX = 4,
gridY = 4;
var partSize = 2;
var charDelay = 400; // time between two chars, in ms
var spread = 80; // max distance from start point to final point
var partSpeed = 0.012;
// --------------------
var canvas = document.getElementById('cv'),
ctx = canvas.getContext('2d'),
width = ctx.canvas.width,
height = ctx.canvas.height,
particles = [];
ctx.translate(0.5,0.5);
// --------------------
// Particle class
function Particle(startX, startY, finalX, finalY) {
this.speed = partSpeed*(1+Math.random()*0.5);
this.x = startX;
this.y = startY;
this.startX = startX;
this.startY = startY;
this.finalX =finalX;
this.finalY =finalY;
this.parameter = 0;
this.draw = function() {
ctx.fillRect(this.x - partSize*0.5, this.y - partSize*0.5, partSize, partSize);
};
this.update = function(p) {
if (this.parameter>=1) return;
this.parameter += this.speed; // use this particle's own randomized speed
if (this.parameter>=1) this.parameter=1;
var par = this.parameter;
this.x = par*this.finalX + (1-par)*this.startX;
this.y = par*this.finalY + (1-par)*this.startY;
};
}
// --------------------
// Text spawner
function fillParticle(text, offx, offy, spread) {
// fill some text
tmpCtx.clearRect(0,0,tmpCtx.canvas.width, tmpCtx.canvas.height);
tmpCtx.font = 'bold ' + fontHeight +'px sans-serif';
tmpCtx.fillStyle = '#A40';
tmpCtx.textBaseline ='top';
tmpCtx.textAlign='left';
tmpCtx.fillText(text, 0, 0);
//
var txtWidth = Math.floor(tmpCtx.measureText(text).width);
// now parse bitmap based on grid
var idata = tmpCtx.getImageData(0, 0, txtWidth, fontHeight);
// use a 32-bit buffer as we are only checking if a pixel is set or not
var buffer32 = new Uint32Array(idata.data.buffer);
// using two loops here, single loop with index-to-x/y is also an option
for(var y = 0; y < fontHeight; y += gridY) {
for(var x = 0; x < txtWidth; x += gridX) {
//buffer32[] will have a value > 0 (true) if set, if not 0=false
if (buffer32[y * txtWidth + x]) {
particles.push(new Particle(offx + x+Math.random()*spread - 0.5*spread,
offy + y+Math.random()*spread - 0.5*spread, offx+x, offy+y));
}
}
}
return txtWidth;
}
var tmpCv = document.createElement('canvas');
// uncomment for debug
//document.body.appendChild(tmpCv);
var tmpCtx = tmpCv.getContext('2d');
// --------------------------------
// spawn the chars of the text one by one
var charIndex = 0;
var lastSpawnDate = -1;
var offX = 30;
var offY = 30;
function spawnChars() {
if (charIndex>= text.length) return;
if (Date.now()-lastSpawnDate < charDelay) return;
offX += fillParticle(text[charIndex], offX, offY, spread);
lastSpawnDate = Date.now();
charIndex++;
}
// --------------------------------
function render() {
// render particles
particles.forEach(function(p) { p.draw();
});
}
function update() {
particles.forEach(function(p) { p.update(); } );
}
// --------------------------------
// animation
function animate(){
requestAnimationFrame(animate);
ctx.clearRect(0, 0, width, height);
render();
update();
//
spawnChars();
}
// launch :
animate();
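To get the 1500ms cadence asked for in the question, it should be enough to raise the spawn delay parameter at the top of the script:
var charDelay = 1500; // time between two chars, in ms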
I am trying to load an MP3 and an HRTF file (WAV), and then produce another output audio signal by convolution, but I am a bit stuck on how to do it.
sound1 = new Sound;
sound1.addEventListener (Event.COMPLETE, completeHandler);
sound1.load (new URLRequest ("test.mp3"));
channel1 = sound1.play (0, int.MAX_VALUE);
function completeHandler (event:Event):void
{ bytes1 = new ByteArray;
samples1 = new <Number> [];
fft1 = new FFT;
addEventListener (Event.ENTER_FRAME, enterFrameHandler);
}
function enterFrameHandler (event:Event):void
{ // get bytes of 1024 samples
bytes1.position = 0;
sound1.extract (bytes1, 1024, channel1.position * 44.1);
// get samples of left or right channel
bytes1.position = 0;
while (bytes1.bytesAvailable > 0)
{ samples1 [int (bytes1.position / 8)] = bytes1.readFloat (); bytes1.readFloat();
}
// analyze samples1
fft1.analyze (samples1);
var limitHz:Number = 5000;
var i:int=0;
var n1:int = fft1.magnitudes.length * limitHz / (44100 / 2);
// And then here I should somehow multiply the signal1*signal2?
for (i = 0; i < n1; i++)
{ var sound1Mag:int = fft1.magnitudes [i];
var sound2Mag:int = fft2.magnitudes [i];
//......?????
}
}
Any help will be much appreciated.
OK, I edited the question to mention that the second file is not a WAV but also an MP3 (I converted the WAV to MP3).
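For what it's worth, a hint rather than a full answer: convolution in the time domain corresponds to complex multiplication in the frequency domain, so multiplying the magnitude arrays alone is not enough. You need the real and imaginary parts of both spectra, multiplied per bin, followed by an inverse FFT. A sketch of the per-bin complex multiply (re1/im1 and re2/im2 are hypothetical names for the complex spectra of the two signals; the FFT class shown above only exposes magnitudes, so it would need to provide these):
for (i = 0; i < n1; i++)
{ var outRe = re1[i] * re2[i] - im1[i] * im2[i]; // real part of the product
var outIm = re1[i] * im2[i] + im1[i] * re2[i]; // imaginary part of the product
// outRe/outIm form one bin of the convolved signal's spectrum
}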