User:David.humphrey/Audio Data API 2: Difference between revisions

Line 109: Line 109:
       var canvas = document.getElementById('fft'),
           ctx = canvas.getContext('2d'),
           channels,
           rate,
           frameBufferLength,
           fft;

       function loadedMetadata(event) {
-        channels = event.mozChannels;
-        rate = event.mozRate;
-        fft = new FFT(4096 / channels, rate);
+        var channels          = event.mozChannels,
+            rate              = event.mozRate,
+            frameBufferLength = event.mozFrameBufferLength;
+
+        fft = new FFT(frameBufferLength / channels, rate);
       }
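
For context, this handler only does something once it is attached to an <audio> element. A minimal sketch of that wiring, where the element id "audio-element" and the handler name audioAvailable are assumptions not shown in this hunk:

       var audio = document.getElementById('audio-element');
       // 'loadedmetadata' fires once the channel count, sample rate and
       // frame buffer length are known; 'MozAudioAvailable' is the Audio
       // Data API event that delivers each decoded frame buffer.
       audio.addEventListener('loadedmetadata', loadedMetadata, false);
       audio.addEventListener('MozAudioAvailable', audioAvailable, false);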


Line 126: Line 125:


         for (var i = 0, fbl = fb.length / 2; i < fbl; i++ ) {
-          // Assuming interlaced stereo channels, need to split and merge into a stereo-mix mono signal
+          // Assuming interlaced stereo channels,
+          // need to split and merge into a stereo-mix mono signal
           signal[i] = (fb[2*i] + fb[2*i+1]) / 2;
         }
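
This loop runs inside the per-frame handler. A rough sketch of that surrounding handler, assuming the names audioAvailable, fb, and signal (the actual surrounding code is not part of this hunk) and a stereo input matching the fb.length / 2 above:

         function audioAvailable(event) {
           var fb = event.frameBuffer,
               // assuming stereo input, matching fb.length / 2 in the loop
               signal = new Float32Array(fb.length / 2),
               magnitude;

           for (var i = 0, fbl = fb.length / 2; i < fbl; i++ ) {
             // Assuming interlaced stereo channels,
             // need to split and merge into a stereo-mix mono signal
             signal[i] = (fb[2*i] + fb[2*i+1]) / 2;
           }

           // run the stereo-mix mono signal through the FFT created in loadedMetadata()
           fft.forward(signal);
         }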
Line 136: Line 136:


         for (var i = 0; i < fft.spectrum.length; i++ ) {
-          magnitude = fft.spectrum[i] * 4000; // multiply spectrum by a zoom value
+          // multiply spectrum by a zoom value
+          magnitude = fft.spectrum[i] * 4000;

           // Draw rectangle bars for each frequency bin
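
The hunk is cut off here; a minimal sketch of what the per-bin bar drawing could look like on the 'fft' canvas (the 4px spacing and 3px bar width are assumptions, not taken from this revision):

           // clear the previous frame before drawing the new spectrum
           ctx.clearRect(0, 0, canvas.width, canvas.height);

           for (var i = 0; i < fft.spectrum.length; i++ ) {
             // multiply spectrum by a zoom value
             magnitude = fft.spectrum[i] * 4000;

             // draw a 3px-wide bar per bin, growing upward from the bottom edge
             ctx.fillRect(i * 4, canvas.height, 3, -magnitude);
           }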