Web Audio API

While the HTML5 audio tag gives us plenty of options for playing back audio, it doesn't offer much in the way of manipulation or other more advanced audio processing. Fortunately, that is being rectified with the Web Audio API. Let's go into a bit of how to use it:

Playback

// The context is the base for the API.
var audioContext = new AudioContext();
// We can load an audio file by using an audio tag
var audiotoload = document.getElementById("audiotoplay");	
var audioSource = audioContext.createMediaElementSource(audiotoload);

// To simply play it, we can connect it to the "destination" or default output of the context
audioSource.connect(audioContext.destination); // Connect to the default output
audiotoload.play();
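The API can also play audio without an audio element at all, by decoding a file into a buffer first. Here's a minimal sketch of that route, assuming a hypothetical file "sound.mp3" served from the same origin:

// A minimal sketch, assuming a hypothetical file "sound.mp3" on the same server
var request = new XMLHttpRequest();
request.open("GET", "sound.mp3", true);
request.responseType = "arraybuffer";
request.onload = function() {
	audioContext.decodeAudioData(request.response, function(buffer) {
		var bufferSource = audioContext.createBufferSource();
		bufferSource.buffer = buffer;
		bufferSource.connect(audioContext.destination);
		bufferSource.start(0); // Play immediately
	});
};
request.send();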
			

Effects - Chaining nodes together

			
var audioContext = new AudioContext();
var audiotoload = document.getElementById("audiotoplay");	
var audioSource = audioContext.createMediaElementSource(audiotoload); // The element's output is now routed through the audio graph instead of straight to the speakers

// We can construct a "Gain node" and connect our audio to it
var gainNode = audioContext.createGain();
audioSource.connect(gainNode);
// We can then connect it to our output to play it with gain control
gainNode.connect(audioContext.destination);
audiotoload.play();

// Here is a slider on the page which changes the gain
var volumeControl = document.getElementById("volume");
volumeControl.addEventListener("change", function(event) {
	gainNode.gain.value = parseFloat(event.target.value); // The slider value arrives as a string
});

// The element on the page
// <input type="range" name="volume" id="volume" min="0" max="10" step=".1">
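The same connect pattern lets us chain as many nodes as we like. As a sketch (the filter type and cutoff value here are just illustrative), a low-pass filter could sit between the source and the gain node:

// A sketch of a longer chain: source -> filter -> gain -> destination
var filterNode = audioContext.createBiquadFilter();
filterNode.type = "lowpass";       // Only let the lower frequencies through
filterNode.frequency.value = 1000; // Cutoff frequency in Hz (illustrative value)

audioSource.connect(filterNode);   // Instead of connecting the source straight to the gain node
filterNode.connect(gainNode);
gainNode.connect(audioContext.destination);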
			
More HTML5 Rocks: Web Audio Intro
Mozilla: Web Audio API

Microphone

One of the great things is that we can access the microphone as an input using our WebRTC getUserMedia call:
var audioSource = null;

// If the browser supports getUserMedia
if (navigator.getUserMedia) {
	navigator.getUserMedia({audio: true}, function(stream) {
			audioSource = audioContext.createMediaStreamSource(stream);
		}, function(error) {alert("Failure " + error.code);}
	);
}
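Once the stream source exists (i.e., inside the success callback), it behaves like any other node. As a sketch, we could run the microphone through a gain node and out to the speakers:

// A sketch: inside the getUserMedia success callback, treat the microphone
// like any other source node (use headphones, or the speakers will feed back)
var micGain = audioContext.createGain();
micGain.gain.value = 0.5;
audioSource.connect(micGain);
micGain.connect(audioContext.destination);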
				

Analysis

Doing an FFT is easy with an analyser node:
var analyser = audioContext.createAnalyser();
audioSource.connect(analyser);
// The analyser passes audio through, so route it on to the output to keep hearing it
analyser.connect(audioContext.destination);

audiotoload.play();

var drawingCanvas = document.getElementById("drawingCanvas");
var drawingContext = drawingCanvas.getContext("2d");
drawingContext.fillStyle = "#FF0000";

var performAnalysis = function() {

	var frequencies = new Uint8Array(analyser.frequencyBinCount);
	analyser.getByteFrequencyData(frequencies);

	// Clear the previous frame, then draw a bar for each frequency bin
	drawingContext.clearRect(0, 0, drawingCanvas.width, drawingCanvas.height);
	for (var i = 0; i < frequencies.length; i++) {
		drawingContext.fillRect(i * 10, drawingCanvas.height - frequencies[i], 10, frequencies[i]);
	}
	requestAnimationFrame(performAnalysis);
};

performAnalysis();
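Like the volume slider earlier, this assumes a canvas element somewhere on the page, for example (the id and size here are just an assumption for this example):

// The element on the page
// <canvas id="drawingCanvas" width="800" height="256"></canvas>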
			
requestAnimationFrame documentation
Web Audio API samples from a forthcoming book

Recording

Matt Diamond wrote Recorderjs to make our lives easier for capturing and saving audio:
var recorder;
//recorder = new Recorder(audioSource); // Once we have the stream...
			
var startRecording = function() {
	recorder.record();
};

var stopRecording = function() {
	recorder.stop();
	
	recorder.exportWAV(function(blob) {
		var url = URL.createObjectURL(blob);
		var au = document.createElement('audio');
		var hf = document.createElement('a');

		au.controls = true;
		au.src = url;
		hf.href = url;
		hf.download = new Date().toISOString() + '.wav';
		hf.innerHTML = hf.download;
		document.body.appendChild(au);
		document.body.appendChild(hf);
	});
};
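Pulling it together with the microphone example, a rough sketch (the button ids here are hypothetical) could create the recorder once the stream arrives and wire buttons up to the functions above:

// A sketch: create the recorder once the microphone stream is available
// (assumes Recorder.js is loaded on the page)
navigator.getUserMedia({audio: true}, function(stream) {
	var micSource = audioContext.createMediaStreamSource(stream);
	recorder = new Recorder(micSource);
}, function(error) {alert("Failure " + error.code);});

// Hypothetical buttons on the page
// <button id="record">Record</button> <button id="stop">Stop</button>
document.getElementById("record").addEventListener("click", startRecording);
document.getElementById("stop").addEventListener("click", stopRecording);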