User:David.humphrey/Audio Data API 2: Difference between revisions

 
(14 intermediate revisions by the same user not shown)
Line 33: Line 33:
* Native array interfaces instead of using accessors and IDL array arguments.
* Native array interfaces instead of using accessors and IDL array arguments.
* No zero padding of audio data occurs anymore.  All frames are exactly 4096 elements in length.
* No zero padding of audio data occurs anymore.  All frames are exactly 4096 elements in length.
* Added '''mozSampleOffset()'''
* Added '''mozCurrentSampleOffset()'''
* Removed undocumented position/buffer methods on audio element.
* Removed undocumented position/buffer methods on audio element.
* Added '''mozChannels''' and '''mozRate''' to '''loadedmetadata''' event.
* Added '''mozChannels''', '''mozRate''', '''mozFrameBufferLength''' to '''loadedmetadata''' event.


Demos written for the previous version are '''not''' compatible, though they can be made compatible quite easily.  See details below.
Demos written for the previous version are '''not''' compatible, though they can be made compatible quite easily.  See details below.
Line 65: Line 65:
var channels,
var channels,
     rate,
     rate,
    frameBufferLength,
     samples;
     samples;


function audioInfo(event) {
function audioInfo(event) {
   channels = event.mozChannels;
   channels         = event.mozChannels;
   rate     = event.mozRate;
   rate             = event.mozRate;
  frameBufferLength = event.mozFrameBufferLength;
}
}


Line 107: Line 109:
       var canvas = document.getElementById('fft'),
       var canvas = document.getElementById('fft'),
           ctx = canvas.getContext('2d'),
           ctx = canvas.getContext('2d'),
           channels,
           fft;
          rate;


       function loadedMetadata(event) {
       function loadedMetadata(event) {
         channels = event.mozChannels;
         var channels         = event.mozChannels,
        rate = event.mozRate;
            rate             = event.mozRate,
            frameBufferLength = event.mozFrameBufferLength;
       
        fft = new FFT(frameBufferLength / channels, rate),
       }
       }


       function audioWritten(event) {
       function audioWritten(event) {
         var fb = event.mozFrameBuffer,
         var fb = event.mozFrameBuffer,
            fft = new FFT(fb.length / channels, rate),
             signal = new Float32Array(fb.length / channels),
             signal = new Float32Array(fb.length / channels),
             magnitude;
             magnitude;


         for (var i = 0, fbl = fb.length / 2; i < fbl; i++ ) {
         for (var i = 0, fbl = fb.length / 2; i < fbl; i++ ) {
           // Assuming interlaced stereo channels, need to split and merge into a stereo-mix mono signal
           // Assuming interlaced stereo channels,
          // need to split and merge into a stereo-mix mono signal
           signal[i] = (fb[2*i] + fb[2*i+1]) / 2;
           signal[i] = (fb[2*i] + fb[2*i+1]) / 2;
         }
         }
Line 132: Line 136:


         for (var i = 0; i < fft.spectrum.length; i++ ) {
         for (var i = 0; i < fft.spectrum.length; i++ ) {
           magnitude = fft.spectrum[i] * 4000; // multiply spectrum by a zoom value
          // multiply spectrum by a zoom value
           magnitude = fft.spectrum[i] * 4000;


           // Draw rectangle bars for each frequency bin
           // Draw rectangle bars for each frequency bin
Line 177: Line 182:
             spectrum    = this.spectrum;
             spectrum    = this.spectrum;


        if ( bufferSize % 2 !== 0 ) {
          throw "Invalid buffer size, must be a power of 2.";
        }
         if ( bufferSize !== buffer.length ) {
         if ( bufferSize !== buffer.length ) {
           throw "Supplied buffer is not the same size as defined FFT. FFT Size: " +
           throw "Supplied buffer is not the same size as defined FFT. FFT Size: " +
Line 266: Line 268:
</pre>
</pre>


<code>mozSampleOffset()</code>
<code>mozCurrentSampleOffset()</code>


<pre>
<pre>
// Get current position of the underlying audio stream, measured in samples written.
// Get current position of the underlying audio stream, measured in samples written.
var currentSampleOffset = audioOutput.mozSampleOffset();
var currentSampleOffset = audioOutput.mozCurrentSampleOffset();
</pre>
</pre>


Line 291: Line 293:
   // Setup a2 to be identical to a1, and play through there.
   // Setup a2 to be identical to a1, and play through there.
   a2.mozSetup(event.mozChannels, event.mozRate, 1);
   a2.mozSetup(event.mozChannels, event.mozRate, 1);
}
}


Line 315: Line 316:
   <body>
   <body>
     <input type="text" size="4" id="freq" value="440"><label for="hz">Hz</label>
     <input type="text" size="4" id="freq" value="440"><label for="hz">Hz</label>
    <button onclick="generateWaveform()">set</button>
     <button onclick="start()">play</button>
     <button onclick="start()">play</button>
     <button onclick="stop()">stop</button>
     <button onclick="stop()">stop</button>


     <script type="text/javascript">
     <script type="text/javascript">
       var sampledata = [];
       var sampleRate = 44100,
      var freq = 440;
          portionSize = sampleRate / 10,
      var interval = -1;
          prebufferSize = sampleRate / 2,
      var audio;
          freq = undefined; // no sound


       function writeData() {
       var audio = new Audio();
        var n = Math.ceil(freq / 100);
      audio.mozSetup(1, sampleRate, 1);
        for(var i=0;i<n;i++)
       var currentWritePosition = 0;
          audio.mozWriteAudio(sampledata);
       }


       function start() {
       function getSoundData(t, size) {
         audio = new Audio();
         var soundData = new Float32Array(size);
         audio.mozSetup(1, 44100, 1);
         if (freq) {
        interval = setInterval(writeData, 10);
          var k = 2* Math.PI * freq / sampleRate;
          for (var i=0; i<size; i++) {
            soundData[i] = Math.sin(k * (i + t));
          }
        }
        return soundData;
       }
       }


       function stop() {
       function writeData() {
         if (interval != -1) {
         while(audio.mozCurrentSampleOffset() + prebufferSize >= currentWritePosition) {
           clearInterval(interval);
           var soundData = getSoundData(currentWritePosition, portionSize);
           interval = -1;
          audio.mozWriteAudio(soundData);
           currentWritePosition += portionSize;
         }
         }
       }
       }


       function generateWaveform() {
      // initial write
      writeData();
      var writeInterval = Math.floor(1000 * portionSize / sampleRate);
      setInterval(writeData, writeInterval);
 
       function start() {
         freq = parseFloat(document.getElementById("freq").value);
         freq = parseFloat(document.getElementById("freq").value);
        // we're playing at 44.1kHz, so figure out how many samples
        // will give us one full period
        var samples = 44100 / freq;
        // Could use Array() here too, typed array is faster
        sampledata = Float32Array(Math.round(samples));
        for (var i=0; i<sampledata.length; i++) {
          sampledata[i] = Math.sin(2*Math.PI * (i / sampledata.length));
        }
       }
       }


       generateWaveform();
       function stop() {
        freq = undefined;
      }
   </script>
   </script>
   </body>
   </body>
Line 378: Line 381:
   readonly attribute unsigned long mozChannels;
   readonly attribute unsigned long mozChannels;
   readonly attribute unsigned long mozRate;
   readonly attribute unsigned long mozRate;
  readonly attribute unsigned long mozFrameBufferLength;
};
};
</pre>
</pre>


The '''mozChannels''' attribute contains the number of channels in this audio resource (e.g., 2).  The '''mozRate''' attribute contains the number of samples per second that will be played, for example 44100.
The '''mozChannels''' attribute contains the number of channels in this audio resource (e.g., 2).  The '''mozRate''' attribute contains the number of samples per second that will be played, for example 44100.  The '''mozFrameBufferLength''' attribute contains the number of samples that will be returned in each '''AudioWritten''' event.  This number is a total for all channels (e.g., 2 channels * 2048 samples = 4096 total).


===== nsIDOMNotifyAudioWrittenEvent =====
===== nsIDOMNotifyAudioWrittenEvent =====
Line 456: Line 460:
=== JavaScript Audio Libraries ===
=== JavaScript Audio Libraries ===


We have started work on a JavaScript library to make building audio web apps easier.  Details are [[Audio Data API JS Library|here]].
* We have started work on a JavaScript library to make building audio web apps easier.  Details are [[Audio Data API JS Library|here]].
* [http://github.com/bfirsh/dynamicaudio.js dynamicaudio.js] - An interface for writing audio with a Flash fall back for older browsers.


=== Working Audio Data Demos ===
=== Working Audio Data Demos ===
Line 468: Line 473:
* FFT visualization (calculated with js)
* FFT visualization (calculated with js)
** http://weare.buildingsky.net/processing/dsp.js/examples/fft.html
** http://weare.buildingsky.net/processing/dsp.js/examples/fft.html
* Beat Detection (also showing use of WebGL for 3D visualizations)
** http://cubicvr.org/CubicVR.js/bd3/BeatDetektor1HD-13a.html (video [http://vimeo.com/11345262 here])
** http://cubicvr.org/CubicVR.js/bd3/BeatDetektor2HD-13a.html (video of older version [http://vimeo.com/11345685 here])
** http://cubicvr.org/CubicVR.js/bd3/BeatDetektor3HD-13a.html (video [http://www.youtube.com/watch?v=OxoFcyKYwr0&fmt=22 here])


* Writing Audio from JavaScript, Digital Signal Processing
* Writing Audio from JavaScript, Digital Signal Processing
** Csound shaker instrument ported to JavaScript via Processing.js http://scotland.proximity.on.ca/dxr/tmp/audio/shaker/
** Csound shaker instrument ported to JavaScript via Processing.js http://scotland.proximity.on.ca/dxr/tmp/audio/shaker/


==== Demos Needing to be Updated to New API ====
==== Demos Needing to be Updated to New API ====
Line 483: Line 492:
** http://ondras.zarovi.cz/demos/audio/
** http://ondras.zarovi.cz/demos/audio/


* Beat Detection (also showing use of WebGL for 3D visualizations)
** http://cubicvr.org/CubicVR.js/BeatDetektor1HD.html (video [http://vimeo.com/11345262 here])
** http://cubicvr.org/CubicVR.js/BeatDetektor2HD.html (video [http://vimeo.com/11345685 here])
** http://weare.buildingsky.net/processing/beat_detektor/beat_detektor.html
** http://weare.buildingsky.net/processing/beat_detektor/beat_detektor.html
** http://code.bocoup.com/processing-js/3d-fft/viz.xhtml
** http://code.bocoup.com/processing-js/3d-fft/viz.xhtml
Confirmed users
656

edits