@Xanmia
Created March 11, 2016 20:42
Simple Audio Visualization
<canvas id="canvas" ></canvas>
<canvas id="canvas2" ></canvas>
<div id="note">Drag and Drop a Music File</div>
// audio and analyser loading from http://codepen.io/agorkem/pen/qdazPG
var audio,
    analyser,
    audioContext,
    sourceNode,
    stream,
    row = 0,
    next = 0,
    canvas = document.getElementById('canvas'),
    canvas2 = document.getElementById('canvas2'),
    note = document.getElementById('note'),
    context = canvas.getContext('2d'),
    context2 = canvas2.getContext('2d'),
    width = canvas.width = canvas2.width = window.innerWidth,
    height = canvas2.height = canvas.height = window.innerHeight,
    c = 0;
audioContext = new AudioContext();
analyser = (analyser || audioContext.createAnalyser());
analyser.smoothingTimeConstant = 1;
analyser.fftSize = 512;
window.addEventListener('drop', function(e) {
  e.stopPropagation();
  e.preventDefault();
  stream = URL.createObjectURL(e.dataTransfer.files[0]); // use the handler's own event, not the global window.event
  if (audio)
    audio.pause();
  audio = new Audio();
  audio.crossOrigin = "anonymous";
  audio.src = stream;
  start();
  note.style.display = 'none';
});
window.addEventListener('dragover', function(e) {
  e.stopPropagation();
  e.preventDefault();
});
function start() {
  // 'canplay' can fire more than once (e.g. after seeking); wire the graph only the first time,
  // since createMediaElementSource throws if called twice on the same element
  audio.addEventListener('canplay', function() {
    sourceNode = audioContext.createMediaElementSource(audio);
    sourceNode.connect(analyser);
    sourceNode.connect(audioContext.destination);
    audio.play();
  }, { once: true });
}
update();

function update() {
  // despite the name, getByteTimeDomainData fills the array with waveform samples (0-255, centred on 128)
  var freqArray = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteTimeDomainData(freqArray);
  context2.clearRect(0, 0, width, height);
  context2.lineWidth = 10;
  context2.beginPath();
  for (var i = 0; i < freqArray.length; i += 10) { // skipping 10 to save your computer's life
    var v = freqArray[i];
    context2.strokeStyle = 'rgb(' + v + ', ' + v + ',' + v + ')';
    context.fillStyle = 'rgb(0, ' + v + ',' + v / 1.5 + ')';
    // waveform line on canvas2, pulsing circle on canvas
    context2.lineTo(i * (width / freqArray.length), (height / 2) + (v - 128));
    context2.stroke();
    context.beginPath();
    context.arc((width / 2), (height / 2), v * 1.5, 0, Math.PI * 2, true);
    context.fill();
  }
  requestAnimationFrame(update);
}

Simple Audio Visualization

Messing around with AudioContext and Canvas. A simple example: drop a music file on the canvas and watch.
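
One caveat when running the pen in current browsers: autoplay policies can leave a freshly created AudioContext in the 'suspended' state until a user gesture, so the visualization may sit flat even after a file is dropped. Below is a minimal, hedged sketch of resuming the pen's audioContext from a gesture handler; the click listener is an assumption added here, not part of the original code.

// Assumed addition (not in the original pen): resume a suspended AudioContext
// after a user gesture so audio.play() is audible and the analyser receives data.
document.addEventListener('click', function () {
  if (audioContext.state === 'suspended') {
    audioContext.resume(); // returns a Promise that resolves once the context is running
  }
});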

A Pen by Xanmia on CodePen.

License.

body,html{margin:0;padding:0;border:0;width:100%;height:100%}
canvas{position:absolute;}
div{ color:white;background-color:silver;padding: 5px 15px;border-radius: 15px; font: 400 25px 'Open Sans', sans-serif;text-align:center;}
body{
background-color: #000000;
margin: 0px;
overflow: hidden;
background-image: radial-gradient(ellipse farthest-corner at center, #23233f 0%, #000000 80%);
}