Web Audio API - 使用 &lt;audio&gt; 元素

问题描述:

我试图创建响应式的音乐可视化,因为看到了这个示例:http://webaudioapi.com/samples/visualizer/ (Web Audio API - 使用 &lt;audio&gt; 元素)

我想要做的就是使用 HTMLMediaElement(而不是缓冲整个音频)来避免加载缓慢。

如何将可视化功能连接到音频元素?

编辑:我已经尝试过使用 blip.js,但在 analyser.getByteTimeDomainData(dataArray) 这一行得到一个错误。

这里是代码:

$(document).ready(function() {
    // NOTE(review): blip.node('audioBufferSource') / blip.node('analyser')
    // return blip wrapper objects, not raw Web Audio nodes, so calling
    // getByteTimeDomainData() on them throws (the error reported above).
    // Use the Web Audio API directly: an Audio element routed through
    // createMediaElementSource() gives streaming playback without buffering
    // the whole file, and a real AnalyserNode for visualization.
    var audio = new Audio('5minutes.mp3');
    var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    var source = audioCtx.createMediaElementSource(audio);
    var analyser = audioCtx.createAnalyser();
    var canvas = document.querySelector('canvas');
    var canvasCtx = canvas.getContext('2d');

    // Signal chain: media element -> analyser -> speakers.
    // (The original never connected the analyser to the destination,
    // so nothing would have been audible.)
    source.connect(analyser);
    analyser.connect(audioCtx.destination);

    var bufferLength = analyser.frequencyBinCount;
    var dataArray = new Uint8Array(bufferLength);

    // Draw one frame of the time-domain waveform and schedule the next.
    function draw() {
        // Declared locally: the original misspelled HEIGTH, leaving HEIGHT an
        // undefined implicit global and producing NaN drawing coordinates.
        var WIDTH = 512;
        var HEIGHT = 256;
        requestAnimationFrame(draw);
        analyser.getByteTimeDomainData(dataArray);
        canvasCtx.fillStyle = 'rgb(200, 200, 200)';
        canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
        canvasCtx.lineWidth = 2;
        canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
        canvasCtx.beginPath();
        var sliceWidth = WIDTH * 1.0 / bufferLength;
        var x = 0;
        for (var i = 0; i < bufferLength; i++) {
            // Byte samples are centred on 128; scale to canvas height.
            var v = dataArray[i] / 128.0;
            var y = v * HEIGHT / 2;
            if (i === 0) {
                canvasCtx.moveTo(x, y);
            } else {
                canvasCtx.lineTo(x, y);
            }
            x += sliceWidth;
        }
        canvasCtx.lineTo(canvas.width, canvas.height / 2);
        canvasCtx.stroke();
    }
    draw();

    $('#play').on('click', function() {
        audio.play();
    });
    $('#stop').on('click', function() {
        // HTMLMediaElement has no stop() method; pause and rewind instead.
        audio.pause();
        audio.currentTime = 0;
    });
});

我已经找到了一种解决方法:

http://jsfiddle.net/tomasantunes/hb5huzew/

$(document).ready(function() {
    var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    var myAudio = document.querySelector('audio');
    var pre = document.querySelector('pre');
    var myScript = document.querySelector('script');

    // Echo this script's source into the <pre> for on-page display.
    pre.innerHTML = myScript.innerHTML;

    // Signal chain: <audio> element -> analyser -> speakers.
    var source = audioCtx.createMediaElementSource(myAudio);
    var analyser = audioCtx.createAnalyser();
    // Was "source.connect(分析仪)" — the identifier was machine-translated
    // into Chinese, which is a ReferenceError at runtime.
    source.connect(analyser);
    analyser.connect(audioCtx.destination);

    var canvas = document.querySelector('canvas');
    var canvasCtx = canvas.getContext('2d');

    var bufferLength = analyser.frequencyBinCount;
    var dataArray = new Uint8Array(bufferLength);

    // Draw one frame of the time-domain waveform and schedule the next.
    function draw() {
        // Declared locally; the original leaked WIDTH/HEIGHT/drawVisual
        // as implicit globals.
        var WIDTH = 512;
        var HEIGHT = 256;
        requestAnimationFrame(draw);
        analyser.getByteTimeDomainData(dataArray);
        canvasCtx.fillStyle = 'rgb(200, 200, 200)';
        canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
        canvasCtx.lineWidth = 2;
        canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
        canvasCtx.beginPath();
        var sliceWidth = WIDTH * 1.0 / bufferLength;
        var x = 0;
        for (var i = 0; i < bufferLength; i++) {
            // Byte samples are centred on 128; scale to canvas height.
            var v = dataArray[i] / 128.0;
            var y = v * HEIGHT / 2;
            if (i === 0) {
                canvasCtx.moveTo(x, y);
            } else {
                canvasCtx.lineTo(x, y);
            }
            x += sliceWidth;
        }
        canvasCtx.lineTo(canvas.width, canvas.height / 2);
        canvasCtx.stroke();
    }
    draw();
});