Web Audio API

While the HTML5 Audio tag offers us a lot of possibilities for playback of audio, it doesn't really allow much in the way of manipulation or other more advanced audio topics. Fortunately, things are starting to be rectified with the Web Audio API. Let's go into a bit of how to use it:

Playback

// An AudioContext is the entry point to the whole Web Audio API.
const ctx = new AudioContext();

// An existing <audio> element on the page can act as the audio source.
const audioElement = document.getElementById("audiotoplay");
const sourceNode = ctx.createMediaElementSource(audioElement);

// Routing the source straight into ctx.destination (the default output
// device) is all that is required for plain playback.
sourceNode.connect(ctx.destination);
audioElement.play();
			

Effects - Chain together

			
// Route an <audio> element through a GainNode so its volume can be
// controlled from a slider on the page.
var audioContext = new AudioContext();
var audiotoload = document.getElementById("audiotoplay");
// Once captured as a source node, the element's audio no longer plays
// through the normal audio-tag pipeline — only through this graph.
var audioSource = audioContext.createMediaElementSource(audiotoload);

// Build the graph: source -> gain -> destination.
var gainNode = audioContext.createGain();
audioSource.connect(gainNode);
gainNode.connect(audioContext.destination);
audiotoload.play();

// Here is a slider on the page which changes the gain. Listen for
// "input" rather than "change" so the volume updates continuously
// while dragging (range inputs fire "change" only when released),
// and coerce the slider's string value to a number explicitly.
var volumeControl = document.getElementById("volume");
volumeControl.addEventListener("input", function (event) {
	gainNode.gain.value = Number(event.target.value);
});

// The element on the page
// <input type="range" name="volume" id="volume" min="0" max="10" step=".1">
			

Further reading: HTML5 Rocks: Web Audio Intro
Mozilla: Web Audio API

Microphone

One of the great things is that we can access the microphone as an input using our WebRTC getUserMedia call.
				
	const constraints = { audio: true, video: false };
	navigator.mediaDevices
		.getUserMedia(constraints)
		.then((stream) => {
			// Wrap the live microphone stream in a source node so the
			// rest of a Web Audio graph can process it.
			const audioContext = new AudioContext();
			const audioSource = audioContext.createMediaStreamSource(stream);
		})
		.catch((err) => {
			alert(err);
		});

				

Analysis

Doing FFT is easy!
	// Real-time frequency analysis of a MediaStream, drawn to a canvas.
	// NOTE(review): `stream` must already be in scope here (e.g. from a
	// getUserMedia() success callback as in the previous example) — confirm.
	var audioContext = new AudioContext();
	var audioSource = audioContext.createMediaStreamSource(stream);

	// The AnalyserNode exposes FFT data without altering the audio.
	var analyser = audioContext.createAnalyser();
	// frequencyBinCount is fftSize / 2, so this gives 128 bins.
	analyser.fftSize = 256;
	var dataArray = new Uint8Array(analyser.frequencyBinCount);

	// Connect the source to the analyser; no destination is needed
	// when we only analyze and do not want to hear the audio.
	audioSource.connect(analyser);

	// Canvas used to visualize the spectrum.
	var drawingCanvas = document.getElementById("drawingCanvas");
	var drawingContext = drawingCanvas.getContext("2d");

	var performAnalysis = function() {
		// Clear the canvas to white.
		drawingContext.fillStyle = "#FFFFFF";
		drawingContext.fillRect(0, 0, drawingCanvas.width, drawingCanvas.height);

		// Snapshot the current frequency-domain data (one 0-255 byte per bin).
		analyser.getByteFrequencyData(dataArray);

		// Draw one 1px-wide red bar per frequency bin.
		drawingContext.fillStyle = "#FF0000";
		var total = 0;
		for (var i = 0; i < dataArray.length; i++) {
			drawingContext.fillRect(i, 0, 1, dataArray[i]);
			total += dataArray[i];
		}

		// The mean bin magnitude serves as a rough "volume" reading.
		console.log("Volume:" + total / dataArray.length);
		requestAnimationFrame(performAnalysis);
	};

	performAnalysis();
			

Stream Manipulation

Just like we can get access to the audio stream and analyze it, we can also manipulate it and feed it out via WebRTC or even streams that come in through WebRTC from peers.
				
	const constraints = { audio: true, video: false };
	navigator.mediaDevices
		.getUserMedia(constraints)
		.then(function (stream) {
			const audioContext = new AudioContext();
			const micSource = audioContext.createMediaStreamSource(stream);

			// Do something to it, in this case, add tones.
			// A GainNode doubles as a mixer: every node connected to its
			// input is summed together.
			const mixNode = audioContext.createGain();
			micSource.connect(mixNode);

			// Create an oscillator and mix its tone in with the microphone.
			const osc = audioContext.createOscillator();
			osc.connect(mixNode);
			osc.frequency.value = 440; // Probably want to hook this up to something
			osc.start();

			// newStream.stream is what we would use to send to other people
			const newStream = audioContext.createMediaStreamDestination();
			mixNode.connect(newStream);

			/* If we have video, we want to put that on the newStream as well:
			// Extract the video tracks from the stream
			let videoTracks = stream.getVideoTracks();

			// Use the first video track, add it to the newStream
			if (videoTracks.length > 0) {
				newStream.stream.addTrack(videoTracks[0]);
			}
			*/
		})
		.catch(function (err) {
			alert(err);
		});
				
Full Example

requestAnimationFrame documentation
Web Audio API samples