如何从AudioContext.createAnalyser()控制音频



我使用 WebRTC,并且能正确接收到流。如果把流直接交给浏览器播放,一切正常;但当我尝试用 AudioContext 的 createAnalyser() 检查频率时,声音虽然仍在播放,我却无法再控制音频的音量。下面是我的代码:

// OP's original code: renders a 100-bar frequency visualization of `stream`
// and (problematically) also routes the raw stream to the speakers.
function startUserMedia(stream) {
var canvas, ctx, again, fbc_array, bars = 100, bar_x, bar_width, bar_height;
var context = new AudioContext();
var analyser = context.createAnalyser();

// NOTE(review): `source` is assigned without `var` — it leaks as an implicit global.
source = context.createMediaStreamSource(stream); 
source.connect(analyser);
// This routes the raw stream straight to the speakers, bypassing any media
// element — consistent with the reported loss of volume control.
analyser.connect(context.destination);
canvas = document.getElementById("analyser");
ctx = canvas.getContext("2d");
frameLooper();
// Redraws the frequency bars on every animation frame.
function frameLooper(){
window.requestAnimationFrame(frameLooper);
fbc_array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(fbc_array);
ctx.clearRect(0, 0, canvas.width, canvas.height);
ctx.fillStyle = "rgb(30, 180, 255)";
for(var i = 0; i < bars; i++){
bar_x = i * 3;
bar_width = 2;
// Negative height makes bars grow upward from the canvas bottom edge.
bar_height = -(fbc_array[i] / 2);
ctx.fillRect(bar_x, canvas.height, bar_width, bar_height);
}
}
}

提前感谢

编辑:

// RTCMultiConnection setup: audio-only, one-way receive session.
var connection = new RTCMultiConnection();
connection.socketURL = 'URL...';
connection.socketMessageEvent = 'message';
connection.session = { audio: true, video: false, oneway: true };
connection.mediaConstraints = { audio: true, video: false };
connection.sdpConstraints.mandatory = { OfferToReceiveAudio: false, OfferToReceiveVideo: false };
connection.onstream = function(event){
    var mediaElement = event.mediaElement;
    // NOTE(review): a muted element produces no sound, so the volume
    // assignment below has no audible effect while muted.
    mediaElement.muted = true;
    mediaElement.volume = 1;
    mediaElement.id = event.streamid;
    $("#elementHtml").append(mediaElement);
    startUserMedia(event.stream);
}; // FIX: this closing "};" was missing from the original paste
我不得不承认,我不熟悉你正在使用的 RTCMultiConnection 库,更不了解它的 onstream 处理程序,也不知道 event.mediaElement 来自哪里,所以你可能得自己动手试一试。

但是,不管这个 event.mediaElement 是如何与 MediaStream 关联起来的,我将尝试列举几种实现目标的基本方法。

  1. 您想通过屏幕内MediaElement的默认控件控制输出音量:在这种情况下,将此MediaElement的srcObject设置为MediaStream,不要将其静音,也不要将分析器节点连接到音频上下文的目标:

// Start on a user click (audio autoplay requires a gesture), then remove the button.
starter.onclick = function () {
  this.parentNode.removeChild(this);
  getStream(onstream);
};

function onstream(stream) {
  // The in-document <audio> element is the audible output, so its built-in
  // controls govern the playback volume.
  // I don't know if your event.MediaStream could work as-is... You will have to try.
  var audio = document.querySelector('audio');
  audio.srcObject = stream;
  startUserMedia(stream);
  audio.play();
}
function startUserMedia(stream) {
  // Draw a live frequency visualization of `stream` on the #analyser canvas.
  // The analyser is deliberately NOT wired to the context's destination, so
  // the on-screen <audio> element remains the only audible output.
  var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
  var analyser = audioCtx.createAnalyser();
  var source = audioCtx.createMediaStreamSource(stream);
  source.connect(analyser);
  // In this case we don't connect to the audioCtx destination.

  var canvas = document.getElementById("analyser");
  var ctx = canvas.getContext("2d");
  var BAR_COUNT = 100;

  (function draw() {
    window.requestAnimationFrame(draw);
    var bins = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(bins);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = "rgb(30, 180, 255)";
    for (var i = 0; i < BAR_COUNT; i++) {
      // Bars grow upward from the bottom edge (negative height).
      ctx.fillRect(i * 3, canvas.height, 2, -(bins[i] / 2));
    }
  })();
}
// Snippet-only helper: builds a MediaStream from a remote mp3 by piping an
// offscreen <audio> element through a MediaStreamDestination node.
function getStream(callback) {
  var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
  aud.crossOrigin = true;
  aud.onloadedmetadata = function () {
    var ctx = new (window.AudioContext || window.webkitAudioContext)();
    var src = ctx.createMediaElementSource(this);
    var streamNode = ctx.createMediaStreamDestination();
    src.connect(streamNode);
    callback(streamNode.stream);
  };
  aud.play();
}
/* Keep everything after the start button hidden until it is clicked. */
#starter ~ *{
visibility: hidden;
}
<!-- Demo markup: start button, audible <audio> output, visualizer canvas. -->
<button id="starter">start</button>
<audio controls></audio>
<canvas id="analyser"></canvas>

  1. 您想通过自制输入控制输出音量:那么甚至不需要使用MediaElement,只需创建一个gainNode,把AnalyserNode连接到它,再把它连接到音频上下文的目标节点。要控制输出音量,只需设置gainNode.gain.value即可

// Kick off on a user gesture and hand the stream straight to the visualizer.
starter.onclick = function () {
  this.parentNode.removeChild(this);
  getStream(startUserMedia);
};
function startUserMedia(stream) {
  // Visualize `stream` on the #analyser canvas and play it through a
  // GainNode so the #vol <input> slider controls the output volume.
  var context = new (window.AudioContext || window.webkitAudioContext)();
  var analyser = context.createAnalyser();

  // GainNode that controls our output volume, driven by the #vol <input>.
  var gainNode = context.createGain();
  vol.oninput = function () {
    gainNode.gain.value = this.value;
  };

  // FIX: `source` was assigned without `var` and leaked as an implicit global.
  var source = context.createMediaStreamSource(stream);
  // Routing: source -> analyser -> gain -> speakers.
  source.connect(analyser);
  analyser.connect(gainNode);
  gainNode.connect(context.destination);

  var canvas = document.getElementById("analyser");
  var ctx = canvas.getContext("2d");
  var bars = 100;

  frameLooper();
  // Redraws the frequency bars on every animation frame.
  function frameLooper() {
    window.requestAnimationFrame(frameLooper);
    var fbc_array = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(fbc_array);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = "rgb(30, 180, 255)";
    for (var i = 0; i < bars; i++) {
      // Bars grow upward from the canvas bottom (negative height).
      ctx.fillRect(i * 3, canvas.height, 2, -(fbc_array[i] / 2));
    }
  }
}
// Snippet-only helper: turns a remote mp3 into a MediaStream via an
// offscreen <audio> element and createMediaStreamDestination().
function getStream(callback) {
  var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
  aud.crossOrigin = true;
  aud.onloadedmetadata = function () {
    var ctx = new (window.AudioContext || window.webkitAudioContext)();
    var src = ctx.createMediaElementSource(this);
    var streamNode = ctx.createMediaStreamDestination();
    src.connect(streamNode);
    callback(streamNode.stream);
  };
  aud.play();
}
/* Keep everything after the start button hidden until it is clicked. */
#starter ~ *{
visibility: hidden;
}
<!-- Demo markup: start button, volume slider driving the GainNode, canvas. -->
<button id="starter">start</button>
<label>volume: <input type="range" min="0" max="1" value="1" step="0.05" id="vol"></label><br>
<canvas id="analyser"></canvas>

  1. 您想通过自制输入分别控制输入[和输出]音量:与方案2类似,只是要在mediaStreamSource和分析器之间再添加一个gainNode:

// Same bootstrap as before: start on click, remove the button, pass the stream on.
starter.onclick = function () {
  this.parentNode.removeChild(this);
  getStream(startUserMedia);
};
function startUserMedia(stream) {
  // Visualize and play `stream`, with independent input (pre-analyser) and
  // output (post-analyser) volume controls from the #vol_in / #vol_out sliders.
  var context = new (window.AudioContext || window.webkitAudioContext)();
  var analyser = context.createAnalyser();

  // Input gain: attenuates what the analyser (and everything after it) sees.
  var gainNode_in = context.createGain();
  vol_in.oninput = function () {
    gainNode_in.gain.value = this.value;
  };
  // Output gain: attenuates only what reaches the speakers.
  var gainNode_out = context.createGain();
  vol_out.oninput = function () {
    gainNode_out.gain.value = this.value;
  };

  // FIX: `source` was assigned without `var` and leaked as an implicit global.
  var source = context.createMediaStreamSource(stream);
  // Routing: source -> gain_in -> analyser -> gain_out -> speakers.
  source.connect(gainNode_in);
  gainNode_in.connect(analyser);
  analyser.connect(gainNode_out);
  gainNode_out.connect(context.destination);

  var canvas = document.getElementById("analyser");
  var ctx = canvas.getContext("2d");
  var bars = 100;

  frameLooper();
  // Redraws the frequency bars on every animation frame.
  function frameLooper() {
    window.requestAnimationFrame(frameLooper);
    var fbc_array = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(fbc_array);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = "rgb(30, 180, 255)";
    for (var i = 0; i < bars; i++) {
      // Bars grow upward from the canvas bottom (negative height).
      ctx.fillRect(i * 3, canvas.height, 2, -(fbc_array[i] / 2));
    }
  }
}
// Snippet-only helper: produces a MediaStream from a remote mp3 so the demo
// has something to analyse (stand-in for the real WebRTC stream).
function getStream(callback) {
  var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
  aud.crossOrigin = true;
  aud.onloadedmetadata = function () {
    var ctx = new (window.AudioContext || window.webkitAudioContext)();
    var src = ctx.createMediaElementSource(this);
    var streamNode = ctx.createMediaStreamDestination();
    src.connect(streamNode);
    callback(streamNode.stream);
  };
  aud.play();
}
/* Keep everything after the start button hidden until it is clicked. */
#starter ~ *{
visibility: hidden;
}
<!-- Demo markup: start button, input/output volume sliders, visualizer canvas. -->
<button id="starter">start</button>
<label>volume in: <input type="range" min="0" max="1" value="1" step="0.05" id="vol_in"></label><br>
<label>volume out: <input type="range" min="0" max="1" value="1" step="0.05" id="vol_out"></label><br>
<canvas id="analyser"></canvas>

  1. 您想通过MediaElement控制输入音量:为了获得跨浏览器的一致体验,您必须监听元素的volumechange事件,并添加一个类似方案3中gainNode_in的增益节点。

// Start on a user click (autoplay policies require a gesture).
starter.onclick = function () {
  this.parentNode.removeChild(this);
  getStream(onstream);
};

function onstream(stream) {
  // Play through the in-document <audio> element and hand both the stream and
  // the element to the visualizer so it can track the element's volume.
  var audio = document.querySelector('audio');
  audio.srcObject = stream;
  startUserMedia(stream, audio);
  audio.play();
}
function startUserMedia(stream, audio) {
  // Visualize `stream`; the <audio> element is the audible output, and its
  // native volume control also drives the gain feeding the analyser.
  var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
  var analyser = audioCtx.createAnalyser();
  var source = audioCtx.createMediaStreamSource(stream);
  var gainNode = audioCtx.createGain();

  // Mirror the element's volume into the analyser's input gain.
  audio.onvolumechange = function () {
    gainNode.gain.value = this.volume;
  };

  // source -> gain -> analyser (no destination: the element does the playback).
  source.connect(gainNode);
  gainNode.connect(analyser);

  var canvas = document.getElementById("analyser");
  var ctx = canvas.getContext("2d");
  var BAR_COUNT = 100;

  (function draw() {
    window.requestAnimationFrame(draw);
    var bins = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(bins);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = "rgb(30, 180, 255)";
    for (var i = 0; i < BAR_COUNT; i++) {
      // Bars grow upward from the bottom edge (negative height).
      ctx.fillRect(i * 3, canvas.height, 2, -(bins[i] / 2));
    }
  })();
}
// Snippet-only helper: fabricates a MediaStream from a remote mp3 using an
// offscreen <audio> element routed through a MediaStreamDestination.
function getStream(callback) {
  var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
  aud.crossOrigin = true;
  aud.onloadedmetadata = function () {
    var ctx = new (window.AudioContext || window.webkitAudioContext)();
    var src = ctx.createMediaElementSource(this);
    var streamNode = ctx.createMediaStreamDestination();
    src.connect(streamNode);
    callback(streamNode.stream);
  };
  aud.play();
}
/* Keep everything after the start button hidden until it is clicked. */
#starter ~ *{
visibility: hidden;
}
<!-- Demo markup: start button, audible <audio> output, visualizer canvas. -->
<button id="starter">start</button>
<audio controls></audio>
<canvas id="analyser"></canvas>

最新更新