Audio visualizations using Web Audio API

Have you ever seen visual effects when playing music with your favourite music player? (Here are some example photos.)




This is just… great. But how do they create these kinds of effects? First, we need to understand how sound is made. Sound is actually the vibration of air (and, more generally, of objects), and the change in air pressure is recorded using a microphone. Every time you play music, the speakers generate those vibrations using the changes in air pressure recorded by the microphone.

We can make use of these data to make visual effects from sounds. In this tutorial, I’ll teach you how to create a visualization like this:


In this demonstration, we’ll be able to drop audio files in and play the music with visuals. OK! Let’s start!

First, we need to set up the HTML for our visuals. It’ll be pretty simple as we only need a canvas.

<canvas id="canvas" style="width: 100%; height: 100%;"></canvas>

Then, it’s time to code some JavaScript!

// Init. all the variables we need (one declaration per line):
// audio      - the AudioContext (created in window.onload)
// source     - the AudioBufferSourceNode that plays the dropped file
// analyser   - the ScriptProcessorNode whose onaudioprocess drives draw()
// canvas/ctx - the <canvas> element and its 2D drawing context
var audio;
var source;
var analyser;
var canvas;
var ctx;

// Logs messages.
// Guarded with `typeof` so it is safe even in environments where
// no console exists (the original `window.console` check throws a
// ReferenceError wherever `window` itself is undefined).
function clog(str) {
    if (typeof console !== 'undefined' && console.log) console.log(str);
}
// This is an "empty" event handler that prevents the event's
// default action (needed on dragover so the browser allows a drop
// instead of navigating to the dragged file).
function noop(e) {
    e.preventDefault();
}

// This is the function used to
// handle the drop of the audio files.
// Reads the first dropped file as an ArrayBuffer, decodes it,
// and starts playback on the global `source` node.
function drop(e) {
    // Prevents the default action of the event
    // In this case, open an audio file
    e.preventDefault();

    // Creates a file reader
    var reader = new FileReader();

    // What should we do when the reader is ready?
    reader.onload = function(e) {
        // Decode the audio file
        // using the Web Audio API
        if (audio.decodeAudioData) {
            audio.decodeAudioData(e.target.result, function(b) {
                source.buffer = b;

                // Play the audio file
                // (start() on modern browsers, noteOn() on legacy webkit)
                if (source.start) source.start(0);
                else source.noteOn(0);
            }, function(e) {
                alert('Audio not playable or not supported.');
            });
        } else {
            // Legacy fallback: build the buffer synchronously
            // (second argument mixes the audio down to mono)
            source.buffer = audio.createBuffer(e.target.result, true);

            // Play the audio file
            if (source.start) source.start(0);
            else source.noteOn(0);
        }
    };

    // Read the dropped file
    reader.readAsArrayBuffer(e.dataTransfer.files[0]);
}

// The function that actually
// draws the visuals.
// Runs once per ScriptProcessorNode audio block; plots the input
// samples as a waveform across the full canvas.
function draw(e) {
    // Get the audio data
    var inp = e.inputBuffer.getChannelData(0);
    var out = e.outputBuffer.getChannelData(0);

    // Start drawing the visuals
    // We first clear the canvas
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    // beginPath() is required: without it every frame's points are
    // appended to one ever-growing path
    ctx.beginPath();
    ctx.strokeStyle = '#6633FF';
    ctx.moveTo(0, canvas.height / 2);

    // Loop through the data and plot them
    for (var i = 0; i < inp.length; i++) {
        // Silence this node's output (the audible signal comes from
        // the source node, not from this processor)
        out[i] = 0;

        // Map sample index -> x, sample value [-1, 1] -> y
        var x = canvas.width * i / inp.length;
        var y = canvas.height / 2 + inp[i] * canvas.height / 2;
        if (i === 0) ctx.moveTo(0, y);
        else ctx.lineTo(x, y);
    }

    // Draw it!
    ctx.stroke();
}

// Page entry point: sets up the audio context, the canvas,
// the drag-and-drop listeners, and the audio processing graph.
window.onload = function() {
    // Init. the audio context
    // and the canvas
    audio = new (window.AudioContext || window.webkitAudioContext)();
    canvas = document.querySelector('#canvas');
    ctx = canvas.getContext('2d');
    canvas.width = window.innerWidth;
    canvas.height = window.innerHeight;

    // Listen for file drop
    // We also need to prevent the default actions of
    // other related events
    canvas.addEventListener('dragover', noop, false);
    canvas.addEventListener('drop', drop, false);
    document.addEventListener('dragover', noop, false);
    document.addEventListener('drop', drop, false);

    // Create the audio source
    // and the analyser that
    // actually gets the audio data
    // NOTE(review): createScriptProcessor is deprecated; an
    // AudioWorklet would be the modern replacement.
    source = audio.createBufferSource();
    analyser = audio.createScriptProcessor(1024, 1, 1);
    analyser.onaudioprocess = draw;
    source.loop = true;
    // `gain` only exists on the legacy webkit AudioBufferSourceNode;
    // guard it so modern browsers don't throw here
    if (source.gain) source.gain.value = 1;

    // Wire up the graph:
    //   source -> analyser      (draw() receives the samples)
    //   analyser -> destination (the processor must reach the
    //                            destination for onaudioprocess to fire;
    //                            draw() zeroes its output, so it is silent)
    //   source -> destination   (the audible path)
    source.connect(analyser);
    analyser.connect(audio.destination);
    source.connect(audio.destination);
};

// Handle window resize:
// keep the canvas's backing store the same size as the viewport
// so the waveform isn't stretched or clipped.
function resize() {
    canvas.width = window.innerWidth;
    canvas.height = window.innerHeight;
}
// Add the resize listener
// (registered at script load, outside onload, so it is attached
// as soon as the script runs)
window.addEventListener('resize', resize, false);

OK, it’s finished. I’ve prepared a live demo for all you guys. Enjoy! 🙂

Leave a Reply

Your email address will not be published. Required fields are marked *