Audio visualizations using Web Audio API

Have you ever seen the visual effects that appear when you play music in your favourite music player? (Here are some example photos.)

[Example photos: effects-1, effects-2, effects-3]

This is just… great. But how are these effects made? First we need to understand how sound is made. Sound is the vibration of air (and, more generally, of objects), and a microphone records it as changes in air pressure. Every time you play music, the speakers regenerate those vibrations from the recorded changes in air pressure.
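In fact, the Web Audio API can produce such a vibration directly. Here's a minimal sketch (separate from the visualizer we're about to build) that plays a 440 Hz sine wave for one second; note that recent browsers may require a user gesture before audio can start:

// Play a pure 440 Hz tone for one second
var context = new (window.AudioContext || window.webkitAudioContext)();
var osc = context.createOscillator();
osc.frequency.value = 440; // 440 vibrations per second
osc.connect(context.destination);
osc.start(0);
osc.stop(context.currentTime + 1);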

We can make use of this data to create visual effects from sound. In this tutorial, I'll show you how to create a visualization like this:

[Demo screenshot: web-audio-visuals-demo]

In this demonstration, we'll be able to drop audio files onto the page and play them with visuals. OK, let's start!

First, we need to set up the HTML for our visuals. It’ll be pretty simple as we only need a canvas.

<canvas id="canvas" style="width: 100%; height: 100%;"></canvas>

Then, it’s time to code some JavaScript!

// Init. all the variables we need
var audio, source, analyser, canvas, ctx;

// Logs messages
function clog(str) {
    if (window.console && console.log) console.log(str);
}

// This is an empty event handler
// that prevents the event's default action
function noop(e) {
    e.stopPropagation();
    e.preventDefault();
}

// This is the function used to
// handle the drop of the audio files.
function drop(e) {
    // Prevent the default action of the event
    // (in this case, opening the audio file in the browser)
    e.stopPropagation();
    e.preventDefault();

    // Creates a file reader
    var reader = new FileReader();

    // What should we do when the reader is ready?
    reader.onload = function(e) {
        // Decode the audio file
        // using the Web Audio API
        if (audio.decodeAudioData) {
            audio.decodeAudioData(e.target.result, function(b) {
                source.buffer = b;

                // Play the audio file
                // (start() replaces the older noteOn())
                source.start(0);
            }, function(e) {
                clog(e);
                alert('Audio not playable or not supported.');
            });
        }
        else {
            // Fallback for old WebKit builds that lack decodeAudioData
            source.buffer = audio.createBuffer(e.target.result, true);

            // Play the audio file (legacy API)
            source.noteOn(0);
        }
    };

    // Read the dropped file
    reader.readAsArrayBuffer(e.dataTransfer.files[0]);    
}

// The function that actually
// draws the visuals
function draw(e) {
    // Get the audio data
    var inp = e.inputBuffer.getChannelData(0);
    var out = e.outputBuffer.getChannelData(0);

    // Start drawing the visuals
    // First, clear the canvas
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.strokeStyle = '#6633FF';
    ctx.beginPath();
    ctx.moveTo(0, canvas.height / 2);

    // Loop through the samples and plot them
    for (var i = 0; i < inp.length; i++) {
        // Silence the processor's output; the source is already
        // connected directly to the destination, so this avoids
        // playing the audio twice
        out[i] = 0;

        var x = canvas.width * i / inp.length;
        var y = canvas.height / 2 + inp[i] * canvas.height / 2;
        i === 0 ? ctx.moveTo(x, y) : ctx.lineTo(x, y);
    }

    // Draw it!
    ctx.stroke();
}

window.onload = function() {
    // Init. the audio context
    // and the canvas
    audio = new(window.AudioContext || window.webkitAudioContext)();
    canvas = document.querySelector('#canvas');
    ctx = canvas.getContext('2d');
    canvas.width = window.innerWidth;
    canvas.height = window.innerHeight;

    // Listen for file drop
    // We also need to prevent the default actions of
    // other related events
    canvas.addEventListener('dragover', noop, false);
    canvas.addEventListener('drop', drop, false);
    document.addEventListener('dragover', noop, false);
    document.addEventListener('drop', drop, false);

    // Create the audio source
    // and the analyser that
    // actually gets the audio data
    source = audio.createBufferSource();
    analyser = audio.createScriptProcessor(1024, 1, 1);
    analyser.onaudioprocess = draw;
    source.connect(analyser);
    analyser.connect(audio.destination);
    source.connect(audio.destination);
    source.loop = true;
};

// Handle window resize
function resize() {
    canvas.width = window.innerWidth;
    canvas.height = window.innerHeight;
}

// Add the resize listener
window.addEventListener('resize', resize, false);
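One caveat: the ScriptProcessorNode used above has since been deprecated (AudioWorklet is its official replacement). For a simple visualizer like this, an AnalyserNode polled with requestAnimationFrame is an easier modern route. Here's a sketch that assumes the same audio, source, canvas and ctx variables from above; note that getFloatTimeDomainData isn't available in very old browsers:

// Sketch: the same waveform plot using an AnalyserNode
// instead of the deprecated ScriptProcessorNode
var analyserNode = audio.createAnalyser();
analyserNode.fftSize = 2048;
var samples = new Float32Array(analyserNode.fftSize);

// The analyser passes audio through, so one chain is enough
source.connect(analyserNode);
analyserNode.connect(audio.destination);

function drawLoop() {
    // Time-domain samples in the range [-1, 1]
    analyserNode.getFloatTimeDomainData(samples);

    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.strokeStyle = '#6633FF';
    ctx.beginPath();

    for (var i = 0; i < samples.length; i++) {
        var x = canvas.width * i / samples.length;
        var y = canvas.height / 2 + samples[i] * canvas.height / 2;
        i === 0 ? ctx.moveTo(x, y) : ctx.lineTo(x, y);
    }

    ctx.stroke();
    requestAnimationFrame(drawLoop);
}

drawLoop();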

OK, it’s finished. I’ve prepared a live demo for all you guys. Enjoy! 🙂

GLSL fragment shaders in JavaScript!

Recently I saw some very nice visual effects done with GLSL fragment shaders here. I loved them so much that I started learning GLSL fragment shaders myself.

They're great, but they have to run on the GPU. Only some browsers provide an API for accessing the GPU (that API is called WebGL), so these visuals can't run in browsers that don't offer it.
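If you're curious whether a browser you're targeting has WebGL, a quick feature test looks something like this:

// A quick feature test for WebGL support
function hasWebGL() {
    try {
        var c = document.createElement('canvas');
        return !!(window.WebGLRenderingContext &&
            (c.getContext('webgl') || c.getContext('experimental-webgl')));
    } catch (e) {
        return false;
    }
}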

To overcome this, I tried porting GLSL fragment shaders to JavaScript and drawing the result using the HTML5 canvas. The canvas API has much broader browser support (and a Flash fallback is available).

The demos I created are unoptimized and may be a bit slow; after all, GLSL shaders are meant to run on a GPU. However, it's still worthwhile to convert some GLSL shaders to achieve nice effects such as post-processing of photos.

Here’s the demo:

  1. http://jsfiddle.net/licson0729/eBjQ8/
  2. http://jsfiddle.net/licson0729/YJqB9/
  3. http://jsfiddle.net/licson0729/7Qe34/
  4. http://jsfiddle.net/licson0729/T3hb7/
  5. http://jsfiddle.net/licson0729/9QVxA/

The technique I used is to render the pixels one by one, with the render function playing the role of the GLSL fragment shader's main function. We also need to translate the GLSL vector types (vec2, vec3, etc.) to their JavaScript equivalents.
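For example, here's how a single GLSL line might translate. The length2 helper and the uv name are just illustrations I've made up; resolution corresponds to the w and h variables used below:

// GLSL: float d = length(gl_FragCoord.xy / resolution - vec2(0.5));

// One possible JavaScript translation, with a hand-rolled vector helper:
function length2(v) {
    return Math.sqrt(v[0] * v[0] + v[1] * v[1]);
}

var uv = [fragcoord[0] / w - 0.5, fragcoord[1] / h - 0.5];
var d = length2(uv);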

Here’s the format of the code:

//The requestAnimFrame fallback for better and smoother animation
window.requestAnimFrame = (function () {
    return window.requestAnimationFrame ||
        window.webkitRequestAnimationFrame ||
        window.mozRequestAnimationFrame ||
        window.oRequestAnimationFrame ||
        window.msRequestAnimationFrame ||
        function (callback) {
            window.setTimeout(callback, 1000 / 60);
        };
})();

//Prepare our canvas
var canvas = document.querySelector('#render');
var w = window.innerWidth;
var h = window.innerHeight;
canvas.width = w;
canvas.height = h;
var ctx = canvas.getContext('2d');

var time = Date.now();
var buffer = ctx.createImageData(w, h); // the back buffer we use to paint the result onto the canvas

//The main render function
function render(time, fragcoord) {
    /* put the GLSL fragment shader's JavaScript equivalent here. */
    return [0, 0, 0, 0]; //the final colour value (RGBA, each in [0, 1])
}

function animate() {
    var delta = (Date.now() - time) / 1000;

    // Run the "shader" once for every pixel; the back buffer is
    // reused between frames since every pixel gets overwritten
    for (var x = 0; x < w; x++) {
        for (var y = 0; y < h; y++) {
            var ret = render(delta, [x, y]);
            var i = (y * buffer.width + x) * 4;
            buffer.data[i] = ret[0] * 255;
            buffer.data[i + 1] = ret[1] * 255;
            buffer.data[i + 2] = ret[2] * 255;
            buffer.data[i + 3] = ret[3] * 255;
        }
    }
    ctx.putImageData(buffer, 0, 0);
    requestAnimFrame(animate);
}

window.onresize = function () {
    w = window.innerWidth;
    h = window.innerHeight;
    canvas.width = w;
    canvas.height = h;
    // Recreate the back buffer at the new size
    buffer = ctx.createImageData(w, h);
};

animate();
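To make the template concrete, here's an example render function of my own (not ported from any particular shader): an animated radial pulse. Drop it in place of the placeholder above and you should see rings rippling outwards from the centre:

// Example render function: an animated radial pulse
function render(time, fragcoord) {
    // Normalize the pixel coordinate to [-0.5, 0.5] on each axis
    var x = fragcoord[0] / w - 0.5;
    var y = fragcoord[1] / h - 0.5;
    var dist = Math.sqrt(x * x + y * y);

    // Pulse the brightness over time
    var v = 0.5 + 0.5 * Math.sin(dist * 20 - time * 2);

    return [v, v * 0.4, 1 - v, 1]; // RGBA, each in [0, 1]
}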

I hope you like it, and that it encourages you to start learning about computer graphics.