Question

I am trying to set up some web audio to load/play multiple sound sources simultaneously. The sounds are being loaded for now and play is triggered through a button input.

My problem is, I want all the sounds to run through one BiquadFilter (in this case a LOWPASS filter, i.e. `type: 0`). I believe I have created the filter right (in two different places — not sure which is correct; see the attached code) but I cannot get a range input to control the frequencies. Something isn't communicating very well and I'm completely lost.

Also, around the same topic, I want each individual sound to run through their own independent gainNodes (volume controls), this again will be changed via a range input. Basically there will be 6 audio files, running through their own gainNodes and then coming together to go through a LOWPASS filter before the destination (i.e. the speakers).

I'm also hoping to route each sound through individual pannerNodes, but at this point I'm close to giving up on the project altogether.

Below is my code (like I said before, the button is triggering all the sounds, but the filter is a BIG problem):

HTML:

<body> 

<div id="startbtn">
<p><input type="button" onClick="tracks.toggle();">PLAY!</p>
</div> <!-- startbtn div -->

<div id="frequency">
<p><input type="range" id="freq1" min="0" max="1" step="0.01" value="1"     onchange="sound.changeFrequency(this);" style="width:180px; background-color:#FFF;">    Frequency</p>
</div>

<script>
var tracks = new SongTracks();
var sound = new playSound();
</script>
</body>

JAVASCRIPT:

// NOTE(review): webkit-prefixed constructor — modern browsers need `new AudioContext()`
// (or a `window.AudioContext || window.webkitAudioContext` fallback).
var context = new webkitAudioContext();
// Assigned in init() below; presumably feeds a spectrum display — verify against callers.
var myAudioAnalyser;

  // Sets up a SECOND AudioContext (`myAudioContext`) with an analyser routed to
  // the speakers. NOTE(review): this context is separate from the global
  // `context` declared above — nodes from different contexts cannot be
  // connected, which may be part of the problem here. `myAudioContext` is also
  // never declared, so this assignment creates an implicit global.
  function init() {
            if('webkitAudioContext' in window) {
                myAudioContext = new webkitAudioContext();
                // an analyser is used for the spectrum
                myAudioAnalyser = myAudioContext.createAnalyser();
                myAudioAnalyser.smoothingTimeConstant = 0.85;
                myAudioAnalyser.connect(myAudioContext.destination);

               fetchSounds(); // NOTE(review): not defined in the code shown — confirm it exists elsewhere
              };
            };


// shim layer with setTimeout fallback
// Cross-browser requestAnimationFrame shim: prefer any native (possibly
// vendor-prefixed) implementation, otherwise fall back to setTimeout at ~60fps.
window.requestAnimFrame = (function () {
  var native =
    window.requestAnimationFrame ||
    window.webkitRequestAnimationFrame ||
    window.mozRequestAnimationFrame ||
    window.oRequestAnimationFrame ||
    window.msRequestAnimationFrame;
  if (native) {
    return native;
  }
  return function (callback) {
    window.setTimeout(callback, 1000 / 60);
  };
})();


// Plays `buffer` at `time` — but creates a brand-new lowpass filter per call.
// NOTE(review): this is the core problem the answer addresses — each sound gets
// its OWN filter, so a single range input can never control them all. Create
// one shared filter and pass it in instead.
function playSound(buffer, time) {
  var source = context.createBufferSource();
  source.buffer = buffer;

 var filter = context.createBiquadFilter();  ///////////////// HERE
 // NOTE(review): numeric type constants are deprecated/removed in modern
 // browsers — use the string form: filter.type = 'lowpass'.
 filter.type = filter.LOWPASS;
 filter.frequency.value = 5000;

 source.connect(filter);
 filter.connect(context.destination);
 source.start(time);

// NOTE(review): when invoked as a plain function, `this` is the global object
// (or undefined in strict mode), so this does not store the filter usefully;
// each call overwrites the previous one anyway.
this.filter = filter;

};

// Loads every sound in `soundMap` ({ name: url }) via BufferLoader and attaches
// each decoded AudioBuffer to `obj` under its name (e.g. obj.vocals). Invokes
// `callback` (if provided) once all buffers have been decoded.
function loadSounds(obj, soundMap, callback) {
  var names = [];
  var paths = [];
  for (var name in soundMap) {
    // Guard against inherited enumerable properties on the map object.
    if (Object.prototype.hasOwnProperty.call(soundMap, name)) {
      names.push(name);
      paths.push(soundMap[name]);
    }
  }
  // `var` added: the original leaked `bufferLoader` as an implicit global.
  var bufferLoader = new BufferLoader(context, paths, function(bufferList) {
    for (var i = 0; i < bufferList.length; i++) {
      // names[] and bufferList[] are index-aligned by construction above.
      obj[names[i]] = bufferList[i];
    }
    if (callback) {
      callback();
    }
  });
  bufferLoader.load();
};

// Fetches a list of audio URLs over XHR and decodes each response into an
// AudioBuffer. `callback` receives the full bufferList once every URL in
// `urlList` has been decoded. (Standard Web Audio loader boilerplate.)
function BufferLoader(context, urlList, callback) {
  this.context = context;
  this.urlList = urlList;
  this.onload = callback;
  this.bufferList = new Array(); // decoded buffers, index-aligned with urlList
  this.loadCount = 0;            // how many URLs have decoded so far
}

// Fetches urlList[index] as an ArrayBuffer and decodes it; when the last
// outstanding buffer decodes, fires this.onload with the complete list.
BufferLoader.prototype.loadBuffer = function(url, index) {
  // Load buffer asynchronously
  var request = new XMLHttpRequest();
  request.open("GET", url, true);
  request.responseType = "arraybuffer";

  var loader = this; // capture for the XHR callbacks below

  request.onload = function() {
    // Asynchronously decode the audio file data in request.response
    loader.context.decodeAudioData(
    request.response,
      function(buffer) {
      if (!buffer) {
      alert('error decoding file data: ' + url);
      return;
    }
    loader.bufferList[index] = buffer;
    // Fire the completion callback only when every URL has decoded.
    if (++loader.loadCount == loader.urlList.length)
      loader.onload(loader.bufferList);
  },
  function(error) {
    console.error('decodeAudioData error', error);
  }
 );
}

  request.onerror = function() {
      alert('BufferLoader: XHR error');
  }

  request.send();
};

// Kicks off one loadBuffer per URL; all requests run in parallel.
BufferLoader.prototype.load = function() {
  for (var i = 0; i < this.urlList.length; ++i)
  this.loadBuffer(this.urlList[i], i);
};

// Starts loading the three stems; once decoded, this.vocals / this.guitar /
// this.piano each hold an AudioBuffer (see loadSounds). Note there is no
// "loaded" flag, so calling play() before loading finishes passes undefined
// buffers to playSound.
var SongTracks = function() {
  loadSounds(this, {
    vocals: 'tracks/vocals.mp3',
    guitar: 'tracks/guitar.mp3',
    piano: 'tracks/piano.mp3'
  });
};

// NOTE(review): declared but never used — SongTracks.prototype.play below
// shadows it with its own local `var filter`, so this stays undefined.
var filter;

// Plays all three stems. NOTE(review): each playSound() call already builds
// its own filter internally, so the filter constructed below is a fourth,
// disconnected-from-playback filter.
SongTracks.prototype.play = function() {
    playSound(this.vocals, 0);
    playSound(this.guitar, 0);
    playSound(this.piano, 0);
///////////////////////////////////////////////////////////// OR HERE   
var source1 = context.createBufferSource();
// NOTE(review): `this.buffer` is never set on SongTracks, so this assigns undefined.
source1.buffer = this.buffer
// NOTE(review): `bufferList` is not in scope here (it only exists inside the
// BufferLoader callback) — this line throws a ReferenceError.
source1 = bufferList[0];


var filter = context.createBiquadFilter();
// NOTE(review): deprecated numeric constant; modern form is filter.type = 'lowpass'.
filter.type = filter.LOWPASS;
filter.frequency.value = 5000;

source1.connect(filter);
filter.connect(context.destination);

// Stores the filter on the SongTracks instance — but the range input's handler
// is wired to `sound` (a playSound instance), not to `tracks`, so it never sees this.
this.filter = filter;
///////////////////////////////////////////////////////////////////// TO HERE?
};

  // NOTE(review): `this.source` is never assigned anywhere in this file —
  // playSound() discards its sources — so this throws a TypeError when the
  // toggle button tries to stop playback.
  SongTracks.prototype.stop = function() {
  this.source.stop(0);
};

// Flips between playing and stopped on each press of the PLAY button,
// tracking state in this.isPlaying (initially undefined, i.e. falsy).
SongTracks.prototype.toggle = function() {
  if (this.isPlaying) {
    this.stop();
  } else {
    this.play();
  }
  this.isPlaying = !this.isPlaying;
};

/* SongTracks.prototype.changeFrequency = function(element) {
var minValue = 40;
var maxValue = context.sampleRate / 2;

var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
var multiplier = Math.pow(2, numberOfOctaves * (element.value - 1.0));
this.filter.frequency.value = maxValue * multiplier;
}; */

// Maps the slider's 0..1 value exponentially onto 40 Hz .. Nyquist and writes
// it to this.filter. NOTE(review): `sound = new playSound()` in the HTML runs
// the constructor with no buffer, so `this.filter` here is an orphan filter
// unrelated to the filters created for the actually-playing sounds — which is
// why the slider appears to do nothing.
playSound.prototype.changeFrequency = function(element) {
var minValue = 40;
var maxValue = context.sampleRate / 2;

var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
// element.value is a string; subtraction coerces it to a number.
var multiplier = Math.pow(2, numberOfOctaves * (element.value - 1.0));
this.filter.frequency.value = maxValue * multiplier;
};


</script>

As you can see from my notes etc., I'm very confused and have kind of hit a brick wall. I've seen code which differentiates the audio files, something like:

   var source1 = context.createBufferSource();
   var source2 = context.createBufferSource();
   var source3 = context.createBufferSource();
   var source4 = context.createBufferSource();

  source1.buffer = bufferList[0];
  source2.buffer = bufferList[1];
  source3.buffer = bufferList[2];
  source4.buffer = bufferList[3];

But I have no idea how to put it together. Good luck.

Was it helpful?

Solution

You should probably simply pass the node to connect to into playSound, and then pass it the FilterNode.

Inside your playSound is the wrong place to create the BiquadFilter - you'll end up creating N of them, one for each playing sound, and you only want one.

You want something like:

HTML file the same, except:

<p><input type="range" id="freq1" min="0" max="1" step="0.01" value="1" onchange="changeFilterFrequency(this);" style="width:180px; background-color:#FFF;">    Frequency</p>

JS:

// Starts `buffer` playing at `time`, routed through `outputNode` (e.g. the
// shared BiquadFilter) rather than straight to the destination.
function playSound(buffer, outputNode, time) {
    var src = context.createBufferSource();
    src.connect(outputNode);
    src.buffer = buffer;
    src.start(time);
}

var globalFilter = null;  // the one filter shared by every playing source

// Plays all three stems through ONE shared lowpass filter so a single range
// input can control the cutoff for everything at once.
SongTracks.prototype.play = function() {
    // BUG FIX: the original declared `var globalFilter` here, shadowing the
    // module-level `globalFilter`, which therefore stayed null and
    // changeFilterFrequency() could never reach the real filter. Assign to the
    // outer variable instead, and reuse it on subsequent play() calls so we
    // don't stack a new filter per press.
    if (!globalFilter) {
        globalFilter = context.createBiquadFilter();
        // String filter types replaced the deprecated numeric constants
        // (the old `filter.LOWPASS` is undefined in modern browsers).
        globalFilter.type = 'lowpass';
        globalFilter.frequency.value = 5000;
        globalFilter.connect(context.destination);
    }

    playSound(this.vocals, globalFilter, 0);
    playSound(this.guitar, globalFilter, 0);
    playSound(this.piano, globalFilter, 0);
};

// Maps the slider position (element.value in 0..1) onto 40 Hz .. Nyquist
// exponentially, so equal slider steps move the cutoff by equal octaves,
// and applies it to the shared filter.
function changeFilterFrequency(element) {
    var lowest = 40;
    var nyquist = context.sampleRate / 2;

    var octaveSpan = Math.log(nyquist / lowest) / Math.LN2;
    var scale = Math.pow(2, octaveSpan * (element.value - 1.0));
    globalFilter.frequency.value = nyquist * scale;
}
Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top