I need to show a spectrum analyzer while music is playing.
Right now, the spectrum drawing is separate from the audio player.
Once the file is ready (after loading it with XHR) and the song is playing, the spectrum is drawn synchronously.
var audioBuffer;
var sourceNode;
var analyser;
var javascriptNode;
var actx = new(AudioContext || webkitAudioContext)(), tid,
url = "https://cdn.rawgit.com/epistemex/free-music-for-test-and-demo/master/music/kf_colibris.mp3";
var ctx = $("#canvas").get()[0].getContext("2d");
var gradient = ctx.createLinearGradient(0,0,0,300);
gradient.addColorStop(1,'#000000');
gradient.addColorStop(0.75,'#ff0000');
gradient.addColorStop(0.25,'#ffff00');
gradient.addColorStop(0,'#ffffff');
setupAudioNodes();
function setupAudioNodes() {
// setup a javascript node
javascriptNode = actx.createScriptProcessor(2048, 1, 1);
// connect to destination, else it isn't called
javascriptNode.connect(actx.destination);
// setup an analyser
analyser = actx.createAnalyser();
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 512;
// create a buffer source node
sourceNode = actx.createBufferSource();
sourceNode.connect(analyser);
analyser.connect(javascriptNode);
sourceNode.connect(actx.destination);
}
// play the loaded sound
function playSound(buffer) {
sourceNode.buffer = buffer;
sourceNode.start(0);
}
// log if an error occurs
function onError(e) {
console.log(e);
}
// when the javascript node is called
// we use information from the analyzer node
// to draw the volume
javascriptNode.onaudioprocess = function() {
// get the average for the first channel
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
// clear the current state
ctx.clearRect(0, 0, 1000, 325);
// set the fill style
ctx.fillStyle=gradient;
drawSpectrum(array);
}
function drawSpectrum(array) {
for ( var i = 0; i < (array.length); i++ ){
var value = array[i];
ctx.fillRect(i*5,325-value,3,325);
// console.log([i,value])
}
};
// old draw --------------------------------------------------------
// STEP 1: Load audio file using AJAX ----------------------------------
loadXHR(url, decode);
tid = setInterval(function() {document.querySelector("div").innerHTML += "."}, 500);
function loadXHR(url, callback) {
try {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "arraybuffer";
xhr.onerror = function() {console.log("Network error.")};
xhr.onload = function() {
if (xhr.status === 200) callback(xhr.response);
else console.log("Loading error:" + xhr.statusText);
};
xhr.send();
} catch (err) {console.log(err.message)}
}
// STEP 2: Decode the audio file ---------------------------------------
function decode(buffer) {
clearInterval(tid);
document.querySelector("div").innerHTML = "Decoding file...";
actx.decodeAudioData(buffer, split);
}
// STEP 3: Split the buffer --------------------------------------------
function split(abuffer) {
document.querySelector("div").innerHTML = "Splitting...";
setTimeout(function() { // to allow DOM to update status-text
// calc number of segments and segment length
var channels = abuffer.numberOfChannels,
duration = abuffer.duration,
rate = abuffer.sampleRate,
segmentLen = 10,
count = Math.floor(duration / segmentLen),
offset = 0,
// block = 10 * rate;
block = abuffer.length;
// while(count--) {
var url = URL.createObjectURL(bufferToWave(abuffer, offset, block));
var audio = new Audio(url);
audio.controls = true;
audio.volume = 0.5;
audio.autoplay = true;
document.body.appendChild(audio);
//offset += block;
// }
document.querySelector("div").innerHTML = "Ready!";
}, 60)
playSound(abuffer);
}
// Convert a audio-buffer segment to a Blob using WAVE representation
function bufferToWave(abuffer, offset, len) {
var numOfChan = abuffer.numberOfChannels,
length = len * numOfChan * 2 + 44,
buffer = new ArrayBuffer(length),
view = new DataView(buffer),
channels = [], i, sample,
pos = 0;
// write WAVE header
setUint32(0x46464952); // "RIFF"
setUint32(length - 8); // file length - 8
setUint32(0x45564157); // "WAVE"
setUint32(0x20746d66); // "fmt " chunk
setUint32(16); // length = 16
setUint16(1); // PCM (uncompressed)
setUint16(numOfChan);
setUint32(abuffer.sampleRate);
setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
setUint16(numOfChan * 2); // block-align
setUint16(16); // 16-bit (hardcoded in this demo)
setUint32(0x61746164); // "data" - chunk
setUint32(length - pos - 4); // chunk length
// write interleaved data
for(i = 0; i < abuffer.numberOfChannels; i++)
channels.push(abuffer.getChannelData(i));
while(pos < length) {
for(i = 0; i < numOfChan; i++) { // interleave channels
sample = Math.max(-1, Math.min(1, channels[i][offset])); // clamp
sample = (0.5 + sample < 0 ? sample * 32768 : sample * 32767)|0; // scale to 16-bit signed int
view.setInt16(pos, sample, true); // update data chunk
pos += 2;
}
offset++ // next source sample
}
// create Blob
return new Blob([buffer], {type: "audio/wav"});
function setUint16(data) {
view.setUint16(pos, data, true);
pos += 2;
}
function setUint32(data) {
view.setUint32(pos, data, true);
pos += 4;
}
}
<canvas id="canvas" width="1000" height="325" style="display: block;"></canvas>
<div>Loading.</div>
Do NOT use a ScriptProcessorNode's onaudioprocess to do visual updates - skip the ScriptProcessor altogether, and use requestAnimationFrame to call getByteFrequencyData and do visual updates.
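A minimal sketch of that approach, reusing analyser, ctx, gradient and drawSpectrum from the code above; the createScriptProcessor call, the javascriptNode connections and the onaudioprocess handler can then be removed entirely:
var freqData = new Uint8Array(analyser.frequencyBinCount);
function draw() {
  requestAnimationFrame(draw);             // one visual update per display frame
  analyser.getByteFrequencyData(freqData); // read the current spectrum
  ctx.clearRect(0, 0, 1000, 325);
  ctx.fillStyle = gradient;
  drawSpectrum(freqData);
}
draw();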
I am testing the FPS on my laptop, which uses the Intel(R) Iris(R) Plus Graphics 655 card, to compare the three.js example with instanced rendering and merged-drawcall rendering.
I used both the QRCode_buffergeometry.json model and the suzanne_buffergeometry.json model:
for QRCode_buffergeometry.json: vertices: 12852, faces: 4284
and for suzanne_buffergeometry.json: vertices: 1515, faces: 967
Then the FPS for suzanne_buffergeometry with a count of 8000:
INSTANCE: 36
MERGED: 43
NATIVE: from 23 to 35 by rotation
and for the QRCode_buffergeometry model with a count of 8000:
INSTANCE: 9
MERGED: 15-17
NATIVE: 17-19
I am very confused by these results.
1. As far as I understand, whether I use instancing or merged draw calls, the number of draw calls is fixed at 1 and the total number of faces to draw is the same, so why are merged draw calls faster than instancing? Since the face and vertex counts are the same, I assume the vertex-shader work to transform the vertices is the same too, so why is merged faster?
2. For the QRCode_buffergeometry model, native is almost the same as merged and better than instanced, so I guess the CPU is not the bottleneck but the GPU is. Still, the final drawing data should be the same; the number of faces to draw is eventually the same, so why is native faster? Isn't instancing supposed to be the best approach? I am pretty sure the camera's near and far planes are big enough, so there should not be any culling issue.
3. When I am trying to optimize a big scene, when should I pick merging, when should I pick instancing, and when is it better to do nothing at all?
Any help?
Thanks a lot~~~
The code for the sample is attached here:
body { margin: 0; }
<div id="container"></div>
<script type="module">
import * as THREE from 'https://cdn.jsdelivr.net/npm/three@0.112.1/build/three.module.js';
import Stats from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/libs/stats.module.js';
import {
GUI
} from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/libs/dat.gui.module.js';
import {
OrbitControls
} from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/controls/OrbitControls.js';
import {
BufferGeometryUtils
} from 'https://cdn.jsdelivr.net/npm/three@0.112.1/examples/jsm/utils/BufferGeometryUtils.js';
var container, stats, gui, guiStatsEl;
var camera, controls, scene, renderer, material;
// gui
var Method = {
INSTANCED: 'INSTANCED',
MERGED: 'MERGED',
NAIVE: 'NAIVE'
};
var api = {
method: Method.INSTANCED,
mesh_number: 1,
count_per_mesh: 1000
};
var modelName = 'suzanne_buffergeometry.json';
var modelScale = (modelName === 'suzanne_buffergeometry.json' ? 1 : 0.01);
var modelVertex = (modelName === 'suzanne_buffergeometry.json' ? 1515 : 12852);
var modelFace = (modelName === 'suzanne_buffergeometry.json' ? 967 : 4284);
//
init();
initMesh();
animate();
//
function clean() {
var meshes = [];
scene.traverse(function(object) {
if (object.isMesh) meshes.push(object);
});
for (var i = 0; i < meshes.length; i++) {
var mesh = meshes[i];
mesh.material.dispose();
mesh.geometry.dispose();
scene.remove(mesh);
}
}
var randomizeMatrix = function() {
var position = new THREE.Vector3();
var rotation = new THREE.Euler();
var quaternion = new THREE.Quaternion();
var scale = new THREE.Vector3();
return function(matrix) {
position.x = Math.random() * 40 - 20;
position.y = Math.random() * 40 - 20;
position.z = Math.random() * 40 - 20;
rotation.x = Math.random() * 2 * Math.PI;
rotation.y = Math.random() * 2 * Math.PI;
rotation.z = Math.random() * 2 * Math.PI;
quaternion.setFromEuler(rotation);
scale.x = scale.y = scale.z = Math.random() * modelScale;
matrix.compose(position, quaternion, scale);
};
}();
function initMesh() {
clean();
console.time(api.method + ' (build)');
for (var i = 0; i < api.mesh_number; i++) {
// make instances
new THREE.BufferGeometryLoader()
.setPath('https://threejs.org/examples/models/json/')
.load(modelName, function(geometry) {
material = new THREE.MeshNormalMaterial();
geometry.computeVertexNormals();
switch (api.method) {
case Method.INSTANCED:
makeInstanced(geometry);
break;
case Method.MERGED:
makeMerged(geometry);
break;
case Method.NAIVE:
makeNaive(geometry);
break;
}
});
}
console.timeEnd(api.method + ' (build)');
var drawCalls = 0;
switch (api.method) {
case Method.INSTANCED:
case Method.MERGED:
drawCalls = api.mesh_number;
break;
case Method.NAIVE:
drawCalls = api.mesh_number * api.count_per_mesh;
break;
}
guiStatsEl.innerHTML = [
'<i>GPU draw calls</i>: ' + drawCalls,
'<i>Face Number</i>: ' + (modelFace * api.mesh_number * api.count_per_mesh),
'<i>Vertex Number</i>: ' + (modelVertex * api.mesh_number * api.count_per_mesh)
].join('<br/>');
}
function makeInstanced(geometry, idx) {
var matrix = new THREE.Matrix4();
var mesh = new THREE.InstancedMesh(geometry, material, api.count_per_mesh);
for (var i = 0; i < api.count_per_mesh; i++) {
randomizeMatrix(matrix);
mesh.setMatrixAt(i, matrix);
}
scene.add(mesh);
}
function makeMerged(geometry, idx) {
var geometries = [];
var matrix = new THREE.Matrix4();
for (var i = 0; i < api.count_per_mesh; i++) {
randomizeMatrix(matrix);
var instanceGeometry = geometry.clone();
instanceGeometry.applyMatrix(matrix);
geometries.push(instanceGeometry);
}
var mergedGeometry = BufferGeometryUtils.mergeBufferGeometries(geometries);
scene.add(new THREE.Mesh(mergedGeometry, material));
}
function makeNaive(geometry, idx) {
var matrix = new THREE.Matrix4();
for (var i = 0; i < api.count_per_mesh; i++) {
randomizeMatrix(matrix);
var mesh = new THREE.Mesh(geometry, material);
mesh.applyMatrix(matrix);
scene.add(mesh);
}
}
function init() {
var width = window.innerWidth;
var height = window.innerHeight;
// camera
camera = new THREE.PerspectiveCamera(70, width / height, 1, 100);
camera.position.z = 30;
// renderer
renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(width, height);
renderer.outputEncoding = THREE.sRGBEncoding;
container = document.getElementById('container');
container.appendChild(renderer.domElement);
// scene
scene = new THREE.Scene();
scene.background = new THREE.Color(0xffffff);
// controls
controls = new OrbitControls(camera, renderer.domElement);
controls.autoRotate = true;
// stats
stats = new Stats();
container.appendChild(stats.dom);
// gui
gui = new GUI();
gui.add(api, 'method', Method).onChange(initMesh);
gui.add(api, 'count_per_mesh', 1, 20000).step(1).onChange(initMesh);
gui.add(api, 'mesh_number', 1, 200).step(1).onChange(initMesh);
var perfFolder = gui.addFolder('Performance');
guiStatsEl = document.createElement('li');
guiStatsEl.classList.add('gui-stats');
perfFolder.__ul.appendChild(guiStatsEl);
perfFolder.open();
// listeners
window.addEventListener('resize', onWindowResize, false);
Object.assign(window, {
scene
});
}
//
function onWindowResize() {
var width = window.innerWidth;
var height = window.innerHeight;
camera.aspect = width / height;
camera.updateProjectionMatrix();
renderer.setSize(width, height);
}
function animate() {
requestAnimationFrame(animate);
controls.update();
stats.update();
render();
}
function render() {
renderer.render(scene, camera);
}
//
function getGeometryByteLength(geometry) {
var total = 0;
if (geometry.index) total += geometry.index.array.byteLength;
for (var name in geometry.attributes) {
total += geometry.attributes[name].array.byteLength;
}
return total;
}
// Source: https://stackoverflow.com/a/18650828/1314762
function formatBytes(bytes, decimals) {
if (bytes === 0) return '0 bytes';
var k = 1024;
var dm = decimals < 0 ? 0 : decimals;
var sizes = ['bytes', 'KB', 'MB'];
var i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
}
</script>
These are only guesses:
Three.js by default culls if things are outside the frustum.
We can turn this off with mesh.frustumCulled = false. I didn't notice a difference and this should show up in the draw count.
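As a quick check, three.js reports how many draw calls the last frame actually issued:
console.log(renderer.info.render.calls); // draw calls in the most recent render()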
Three.js by default sorts opaque objects front to back.
This means everything else being equal, sorted will run faster
than unsorted because of the depth test. If I set the depth test
to always
material.depthFunc = THREE.AlwaysDepth
Then I seem to get slightly faster rendering with instanced vs native. Of course
everything else is not equal.
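For reference, a sketch of those two tweaks applied together once the meshes exist (frustumCulled and depthFunc are standard three.js properties):
scene.traverse(function (object) {
  if (object.isMesh) {
    object.frustumCulled = false;                  // skip per-mesh frustum culling
    object.material.depthFunc = THREE.AlwaysDepth; // depth test always passes
  }
});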
An issue in Chrome.
If I run in Firefox or Safari I get the expected results. Merged > Instanced > Native
It could be a bug or it could be they're working around a driver or
security issue that the other browsers are not. You'd have to ask.
I wrote test code for recording in a web page. The exported WAV file can be played, but it sounds totally wrong, just a few meaningless sounds, and the duration shown in VLC is not right. Please have a look: is there anything wrong in the code?
<html>
<head></head>
<body>
<script src="https://code.jquery.com/jquery-3.1.0.min.js"></script>
<button id='start'>Start</button>
<button id='stop'>Stop</button>
<button id='export'>Export</button>
<script>
var isRecording = false;
var buf = [];
var totalLen = 0;
var channelNum = 1;
var sampleRate = 48000;
$('#start').click(function()
{
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
window.URL = window.URL || window.webkitURL;
var cxt = new AudioContext();
isRecording = true;
navigator.getUserMedia({audio:true}, function(stream){
var streamNode = cxt.createMediaStreamSource(stream);
var processNode = (cxt.createScriptProcessor || cxt.createJavaScriptNode).call(cxt, 1024 * 2, channelNum, channelNum);
processNode.onaudioprocess = function(e){
if(!isRecording) return;
var f = e.inputBuffer;
var cdata = f.getChannelData(channelNum -1);
buf.push(cdata);
totalLen += cdata.length;
};
streamNode.connect(processNode);
processNode.connect(cxt.destination);
}, function(){});
});
$('#stop').click(function(){
isRecording = false;
});
$('#export').click(function(){
var oneBuf = new Float32Array(totalLen);
var offset = 0;
for(var i=0;i<buf.length;++i){
oneBuf.set(buf[i], offset);
offset += buf[i].length;
}
var encodedBuf = encodeWAV(oneBuf);
var blob = new Blob([encodedBuf], {type:'audio/wav'});
console.log(encodedBuf);
var url = URL.createObjectURL(blob);
var hf = document.createElement('a');
hf.download = '1.wav';
hf.innerHTML = hf.download;
hf.href = url;
$('body').append(hf);
});
function encodeWAV(samples){
var arr = new ArrayBuffer(44 + samples.length * 2);
var view = new DataView(arr);
/* RIFF identifier */
writeString(view, 0, 'RIFF');
/* RIFF chunk length */
view.setUint32(4, 36 + samples.length * 2, true);
/* RIFF type */
writeString(view, 8, 'WAVE');
/* format chunk identifier */
writeString(view, 12, 'fmt ');
/* format chunk length */
view.setUint32(16, 16, true);
/* sample format (raw) */
view.setUint16(20, 1, true);
/* channel count */
view.setUint16(22, channelNum, true);
/* sample rate */
view.setUint32(24, sampleRate, true);
/* byte rate (sample rate * block align) */
view.setUint32(28, sampleRate * 4, true);
/* block align (channel count * bytes per sample) */
view.setUint16(32, channelNum * 2, true);
/* bits per sample */
view.setUint16(34, 16, true);
/* data chunk identifier */
writeString(view, 36, 'data');
/* data chunk length */
view.setUint32(40, samples.length * 2, true);
floatTo16BitPCM(view, 44, samples);
console.log('my samplerate:%d', sampleRate);
return view;
}
function floatTo16BitPCM(view, offset, input) {
for (var i = 0; i < input.length; i++, offset += 2) {
var s = Math.max(-1, Math.min(1, input[i]));
view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
function writeString(view, offset, string) {
for (var i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
</script>
</body>
</html>
After doing some investigation, I found the problem.
var cdata = f.getChannelData(channelNum -1);
buf.push(cdata);
The variable buf stores sound data from the audio buffer, but it only stores a reference to that buffer. The buffer is continuously reused and overwritten by the low-level code. As a consequence, buf ends up holding an array of references to the same buffer. That's why the sound played by VLC is meaningless.
After changing it to the following, the code works fine:
var cdata = f.getChannelData(channelNum -1);
buf.push(cdata.slice());
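(buf.push(new Float32Array(cdata)) is an equivalent fix; constructing a typed array from another typed array copies its contents.)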
But I still don't know why the duration value shown in VLC is incorrect.
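A guess at the duration problem, based only on the code above: the header hardcodes sampleRate = 48000, but an AudioContext chooses its own rate (often 44100), and the byte-rate field is written as sampleRate * 4 even though the block align for 16-bit mono is channelNum * 2 = 2. Players such as VLC estimate PCM duration from the byte rate, so either mismatch would skew it. A minimal sketch of both fixes:
// 1) use the context's real capture rate in the WAV header
var cxt = new AudioContext();
sampleRate = cxt.sampleRate;
// 2) byte rate = sample rate * block align = sampleRate * channelNum * 2 for 16-bit PCM
view.setUint32(28, sampleRate * channelNum * 2, true);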
Here is my code. This is my word search game. The word list array holds the words to find in the grid. My question: how can I type other languages in that array, and how can I use other languages in the grid?
import flash.display.*;
import flash.text.*;
import flash.geom.Point;
import flash.events.*;
import flash.net.FileFilter;
import flash.filters.GlowFilter;
const puzzleSize:uint = 20;
const spacing:Number = 24;
const outlineSize:Number = 20;
const offset:Point = new Point(162.9,179.7);
const offsett:Point = new Point(300,265);
const spacingg:Number = 36;
const letterFormat:TextFormat = new TextFormat("Bamini",18,0x000000,true,false,false,null,null,TextFormatAlign.CENTER);
// words and grid
var wordList:Array;
var usedWords:Array;
var grid:Array;
// game state
var dragMode:String;
var startPoint,endPoint:Point;
var numFound:int;
// sprites
var gameSprite:Sprite;
var outlineSprite:Sprite;
var oldOutlineSprite:Sprite;
var letterSprites:Sprite;
var wordsSprite:Sprite;
startWordSearch();
function startWordSearch() {
// word list
wordList = ("Lion,Tiger,Cheetah,Panther").split(",");
// set up the sprites
gameSprite = new Sprite();
addChild(gameSprite);
oldOutlineSprite = new Sprite();
gameSprite.addChild(oldOutlineSprite);
outlineSprite = new Sprite();
gameSprite.addChild(outlineSprite);
letterSprites = new Sprite();
gameSprite.addChild(letterSprites);
wordsSprite = new Sprite();
gameSprite.addChild(wordsSprite);
// array of letters
var letters:Array = placeLetters();
// array of sprites
grid = new Array();
for(var x:int=0;x<puzzleSize;x++) {
grid[x] = new Array();
for(var y:int=0;y<puzzleSize;y++) {
// create new letter field and sprite
var newLetter:TextField = new TextField();
newLetter.defaultTextFormat = letterFormat;
newLetter.x = x*spacing + offset.x;
newLetter.y = y*spacing + offset.y;
newLetter.width = spacing;
newLetter.height = spacing;
newLetter.text = letters[x][y];
newLetter.selectable = false;
var newLetterSprite:Sprite = new Sprite();
newLetterSprite.addChild(newLetter);
letterSprites.addChild(newLetterSprite);
grid[x][y] = newLetterSprite;
// add event listeners
newLetterSprite.addEventListener(MouseEvent.MOUSE_DOWN, clickLetter);
newLetterSprite.addEventListener(MouseEvent.MOUSE_OVER, overLetter);
}
}
// stage listener
stage.addEventListener(MouseEvent.MOUSE_UP, mouseRelease);
// create word list fields and sprites
for(var i:int=0;i<usedWords.length;i++) {
//var myglow:GlowFilter=new GlowFilter();
//myglow.color = 0x0000FF;
//myglow.blurX=5;
//myglow.blurY=5;
//myglow.strength=200;
var newWord:TextField = new TextField();
newWord.defaultTextFormat = letterFormat;
newWord.x = 700;
newWord.y = i*spacingg+offsett.y;
newWord.width =140;
newWord.height =spacing;
newWord.text = usedWords[i];
//newWord.filters=[myglow];
newWord.selectable = false;
wordsSprite.addChild(newWord);
}
// set game state
dragMode = "none";
numFound = 0;
}
// place the words in a grid of letters
function placeLetters():Array {
// create empty grid
var letters:Array = new Array();
for(var x:int=0;x<puzzleSize;x++) {
letters[x] = new Array();
for(var y:int=0;y<puzzleSize;y++) {
letters[x][y] = "*";
}
}
// make copy of word list
var wordListCopy:Array = wordList.concat();
usedWords = new Array();
// make 1000 attempts to add words
var repeatTimes:int = 1000;
repeatLoop:while (wordListCopy.length > 0) {
if (repeatTimes-- <= 0) break;
// pick a random word, location and direction
var wordNum:int = Math.floor(Math.random()*wordListCopy.length);
var word:String = wordListCopy[wordNum].toUpperCase();
x = Math.floor(Math.random()*puzzleSize);
y = Math.floor(Math.random()*puzzleSize);
var dx:int = Math.floor(Math.random()*3)-1;
var dy:int = Math.floor(Math.random()*3)-1;
if ((dx == 0) && (dy == 0)) continue repeatLoop;
// check each spot in grid to see if word fits
letterLoop:for (var j:int=0;j<word.length;j++) {
if ((x+dx*j < 0) || (y+dy*j < 0) || (x+dx*j >= puzzleSize) || (y+dy*j >= puzzleSize)) continue repeatLoop;
var thisLetter:String = letters[x+dx*j][y+dy*j];
if ((thisLetter != "*") && (thisLetter != word.charAt(j))) continue repeatLoop;
}
// insert word into grid
insertLoop:for (j=0;j<word.length;j++) {
letters[x+dx*j][y+dy*j] = word.charAt(j);
}
// remove word from list
wordListCopy.splice(wordNum,1);
usedWords.push(word);
}
// fill rest of grid with random letters
for(x=0;x<puzzleSize;x++) {
for(y=0;y<puzzleSize;y++) {
if (letters[x][y] == "*") {
letters[x][y] = String.fromCharCode(65+Math.floor(Math.random()*26));
}
}
}
return letters;
}
// player clicks down on a letter to start
function clickLetter(event:MouseEvent) {
var letter:String = event.currentTarget.getChildAt(0).text;
startPoint = findGridPoint(event.currentTarget);
dragMode = "drag";
}
// player dragging over letters
function overLetter(event:MouseEvent) {
if (dragMode == "drag") {
endPoint = findGridPoint(event.currentTarget);
// if valid range, show outline
outlineSprite.graphics.clear();
if (isValidRange(startPoint,endPoint)) {
drawOutline(outlineSprite,startPoint,endPoint,0x66CCFF);
}
}
}
// mouse released
function mouseRelease(event:MouseEvent) {
if (dragMode == "drag") {
dragMode = "none";
outlineSprite.graphics.clear();
// get word and check it
if (isValidRange(startPoint,endPoint)) {
var word = getSelectedWord();
checkWord(word);
}
}
}
// when a letter is clicked, find and return the x and y location
function findGridPoint(letterSprite:Object):Point {
// loop through all sprites and find this one
for(var x:int=0;x<puzzleSize;x++) {
for(var y:int=0;y<puzzleSize;y++) {
if (grid[x][y] == letterSprite) {
return new Point(x,y);
}
}
}
return null;
}
// determine if range is in the same row, column, or a 45 degree diagonal
function isValidRange(p1,p2:Point):Boolean {
if (p1.x == p2.x) return true;
if (p1.y == p2.y) return true;
if (Math.abs(p2.x-p1.x) == Math.abs(p2.y-p1.y)) return true;
return false;
}
// draw a thick line from one location to another
function drawOutline(s:Sprite,p1,p2:Point,c:Number) {
var off:Point = new Point(offset.x+spacing/2, offset.y+spacing/2);
s.graphics.lineStyle(outlineSize,c);
s.graphics.moveTo(p1.x*spacing+off.x ,p1.y*spacing+off.y);
s.graphics.lineTo(p2.x*spacing+off.x ,p2.y*spacing+off.y);
}
// find selected letters based on start and end points
function getSelectedWord():String {
// determine dx and dy of selection, and word length
var dx = endPoint.x-startPoint.x;
var dy = endPoint.y-startPoint.y;
var wordLength:Number = Math.max(Math.abs(dx),Math.abs(dy))+1;
// get each character of selection
var word:String = "";
for(var i:int=0;i<wordLength;i++) {
var x = startPoint.x;
if (dx < 0) x -= i;
if (dx > 0) x += i;
var y = startPoint.y;
if (dy < 0) y -= i;
if (dy > 0) y += i;
word += grid[x][y].getChildAt(0).text;
}
return word;
}
// check word against word list
function checkWord(word:String) {
// loop through words
for(var i:int=0;i<usedWords.length;i++) {
// compare word
if (word == usedWords[i].toUpperCase()) {
foundWord(word);
}
// compare word reversed
var reverseWord:String = word.split("").reverse().join("");
if (reverseWord == usedWords[i].toUpperCase()) {
foundWord(reverseWord);
}
}
}
// word found, remove from list, make outline permanent
function foundWord(word:String) {
// draw outline in permanent sprite
drawOutline(oldOutlineSprite,startPoint,endPoint,0xE01212);
// find text field and set it to gray
for(var i:int=0;i<wordsSprite.numChildren;i++) {
if (TextField(wordsSprite.getChildAt(i)).text.toUpperCase() == word) {
TextField(wordsSprite.getChildAt(i)).textColor = 0xFF0000;
}
}
// see if all have been found
numFound++;
if (numFound ==usedWords.length) {
endGame();
}
}
function endGame() {
gotoAndStop("gameover";
cleanUp();
Timee.stop();
}
function cleanUp() {
removeChild(gameSprite);
gameSprite = null;
grid = null;
vv=null;
}
I want to build an animated alphabet made up of particles. Basically, the particles transform from one letter shape to another.
My idea is to fill the letters as text on a canvas very quickly (for a single frame), get the pixel data, and move the particles to the correct locations on a setInterval. I have this code for scanning the screen right now (canvas is the page's <canvas> element):
var ctx = canvas.getContext('2d'),
width = ctx.canvas.width,
height = ctx.canvas.height,
particles = [],
gridX = 8,
gridY = 8;
function Particle(x, y) {
this.x = x;
this.y = y;
}
// fill some text
ctx.font = 'bold 80px sans-serif';
ctx.fillStyle = '#ff0';
ctx.fillText("STACKOVERFLOW", 5, 120);
// now parse bitmap based on grid
var idata = ctx.getImageData(0, 0, width, height);
// use a 32-bit buffer as we are only checking if a pixel is set or not
var buffer32 = new Uint32Array(idata.data.buffer);
// using two loops here, single loop with index-to-x/y is also an option
for(var y = 0; y < height; y += gridY) {
for(var x = 0; x < width; x += gridX) {
//buffer32[] will have a value > 0 (true) if set, if not 0=false
if (buffer32[y * width + x]) {
particles.push(new Particle(x, y));
}
}
}
// render particles
ctx.clearRect(0, 0, width, height);
particles.forEach(function(p) {
ctx.fillRect(p.x - 2, p.y - 2, 4, 4); // just squares here
})
But this way I am only showing one word, without any change over time. Also, I want to set up about 200 particles initially and reorganise them based on the pixel data, not create new ones on each scan. How would you rewrite the code so that every 1500 ms I can pass a different letter and render it with particles?
Hopefully the different parts of this code are clear enough: there are particles that can draw and update themselves, fillParticle spawns particles out of a text string, and spawnChars feeds a new part of the text to the renderer on a regular basis.
It works quite well; play with the parameters if you wish, they are all at the start of the fiddle.
You might want to make this code cleaner by avoiding globals and creating classes.
http://jsbin.com/jecarupiri/1/edit?js,output
// --------------------
// parameters
var text = 'STACKOVERFLOW';
var fontHeight = 80;
var gridX = 4,
gridY = 4;
var partSize = 2;
var charDelay = 400; // time between two chars, in ms
var spread = 80; // max distance from start point to final point
var partSpeed = 0.012;
// --------------------
var canvas = document.getElementById('cv'),
ctx = canvas.getContext('2d'),
width = ctx.canvas.width,
height = ctx.canvas.height,
particles = [];
ctx.translate(0.5,0.5);
// --------------------
// Particle class
function Particle(startX, startY, finalX, finalY) {
this.speed = partSpeed*(1+Math.random()*0.5);
this.x = startX;
this.y = startY;
this.startX = startX;
this.startY = startY;
this.finalX =finalX;
this.finalY =finalY;
this.parameter = 0;
this.draw = function() {
ctx.fillRect(this.x - partSize*0.5, this.y - partSize*0.5, partSize, partSize);
};
this.update = function() {
if (this.parameter>=1) return;
this.parameter += this.speed;
if (this.parameter>=1) this.parameter=1;
var par = this.parameter;
this.x = par*this.finalX + (1-par)*this.startX;
this.y = par*this.finalY + (1-par)*this.startY;
};
}
// --------------------
// Text spawner
function fillParticle(text, offx, offy, spread) {
// fill some text
tmpCtx.clearRect(0,0,tmpCtx.canvas.width, tmpCtx.canvas.height);
tmpCtx.font = 'bold ' + fontHeight +'px sans-serif';
tmpCtx.fillStyle = '#A40';
tmpCtx.textBaseline ='top';
tmpCtx.textAlign='left';
tmpCtx.fillText(text, 0, 0);
//
var txtWidth = Math.floor(tmpCtx.measureText(text).width);
// now parse bitmap based on grid
var idata = tmpCtx.getImageData(0, 0, txtWidth, fontHeight);
// use a 32-bit buffer as we are only checking if a pixel is set or not
var buffer32 = new Uint32Array(idata.data.buffer);
// using two loops here, single loop with index-to-x/y is also an option
for(var y = 0; y < fontHeight; y += gridY) {
for(var x = 0; x < txtWidth; x += gridX) {
//buffer32[] will have a value > 0 (true) if set, if not 0=false
if (buffer32[y * txtWidth + x]) {
particles.push(new Particle(offx + x+Math.random()*spread - 0.5*spread,
offy + y+Math.random()*spread - 0.5*spread, offx+x, offy+y));
}
}
}
return txtWidth;
}
var tmpCv = document.createElement('canvas');
// uncomment for debug
//document.body.appendChild(tmpCv);
var tmpCtx = tmpCv.getContext('2d');
// --------------------------------
// spawn the chars of the text one by one
var charIndex = 0;
var lastSpawnDate = -1;
var offX = 30;
var offY = 30;
function spawnChars() {
if (charIndex>= text.length) return;
if (Date.now()-lastSpawnDate < charDelay) return;
offX += fillParticle(text[charIndex], offX, offY, spread);
lastSpawnDate = Date.now();
charIndex++;
}
// --------------------------------
function render() {
// render particles
particles.forEach(function(p) { p.draw();
});
}
function update() {
particles.forEach(function(p) { p.update(); } );
}
// --------------------------------
// animation
function animate(){
requestAnimationFrame(animate);
ctx.clearRect(0, 0, width, height);
render();
update();
//
spawnChars();
}
// launch :
animate();
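To get the 1500 ms cadence asked for in the question, set charDelay = 1500 in the parameters at the top of the fiddle.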
I am trying to load an MP3 and an HRTF file (WAV), and then by convolution produce another output audio signal, but I am a bit stuck on how to do it.
sound1 = new Sound;
sound1.addEventListener (Event.COMPLETE, completeHandler);
sound1.load (new URLRequest ("test.mp3"));
channel1 = sound1.play (0, int.MAX_VALUE);
function completeHandler (event:Event):void
{ bytes1 = new ByteArray;
samples1 = new <Number> [];
fft1 = new FFT;
addEventListener (Event.ENTER_FRAME, enterFrameHandler);
}
function enterFrameHandler (event:Event):void
{ // extract 1024 sample frames starting at the playhead
bytes1.position = 0;
sound1.extract (bytes1, 1024, channel1.position * 44.1);
// get samples of left or right channel
bytes1.position = 0;
while (bytes1.bytesAvailable > 0)
{ samples1 [int (bytes1.position / 8)] = bytes1.readFloat (); bytes1.readFloat();
}
// analyze samples1
fft1.analyze (samples1);
var limitHz:Number = 5000;
var i:int=0;
var n1:int = fft1.magnitudes.length * limitHz / (44100 / 2);
// And then here I should somehow multiply the signal1*signal2?
for (i = 0; i < n1; i++)
{ var sound1Mag:int = fft1.magnitudes [i];
var sound2Mag:int = fft2.magnitudes [i];
//......?????
}
}
Any help will be much appreciated.
OK, I have edited the question to mention that the second file is not a WAV but an MP3 too (I converted the WAV to MP3).
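For what it's worth, convolution in the time domain corresponds to pointwise complex multiplication in the frequency domain, so multiplying the magnitudes alone is not enough: the phases must be combined as well. A sketch of that step (in JavaScript rather than AS3, and assuming hypothetical re/im arrays for the two spectra, since this FFT class only exposes magnitudes):
// (a + bi)(c + di) = (ac - bd) + (ad + bc)i, bin by bin
function multiplySpectra(re1, im1, re2, im2) {
  var n = re1.length, outRe = new Float32Array(n), outIm = new Float32Array(n);
  for (var i = 0; i < n; i++) {
    outRe[i] = re1[i] * re2[i] - im1[i] * im2[i];
    outIm[i] = re1[i] * im2[i] + im1[i] * re2[i];
  }
  return {re: outRe, im: outIm}; // inverse-FFT this to get the convolved block
}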