JS - BPM Analyzer: the argument to AudioContext.decodeAudioData can't be a detached buffer



I'm working on an audio analysis project and I'm looking for a way to get the BPM of an mp3/wav track with JavaScript.

This is the only post I've found on the topic: Detect the BPM of an audio input using JavaScript?

And this is the only solution I've found:

(function() {
var AUDIO_URL = 'media/song.mp3';
var CHANNELS, NUMBER_OF_PREVIOUS_SAMPLES, SAMPLES_PER_INSTANT_ENERGY, SAMPLE_RATE, THRESHOLD_CONSTANT, VARIANCE_COEFFICIENT, audioContext; 
var audioSample, beatDetector, beatDetectorVisualisation, beatVisualisation, getAudioContext, getOfflineAudioContext, loadAudioFromUrl, main; 
var playTrack, sampleLengthSeconds, timeline, trackStartTime, updateAudioFromArrayBuffer, updateAudioFromPcmData, updateBeats, updateSongPlace;
var updateVisualisation, windowEnd, windowStart,
__extends = function(child, parent) { 
for (var key in parent) { 
if (__hasProp.call(parent, key)) child[key] = parent[key]; 
} function ctor() { 
this.constructor = child; 
} 
ctor.prototype = parent.prototype; 
child.prototype = new ctor(); 
child.__super__ = parent.prototype; return child; 
},
__hasProp = {}.hasOwnProperty;
THRESHOLD_CONSTANT = 1.5;
VARIANCE_COEFFICIENT = 0;
SAMPLES_PER_INSTANT_ENERGY = 1024;
NUMBER_OF_PREVIOUS_SAMPLES = 43;
CHANNELS = 1;
SAMPLE_RATE = 44100;
audioContext = null;
audioSample = null;
sampleLengthSeconds = 0;
trackStartTime = 0;
beatVisualisation = null;
beatDetectorVisualisation = null;
beatDetector = null;
windowStart = 0;
windowEnd = null;
timeline = null;
getAudioContext = function() {
var AudioContext;
AudioContext = window.AudioContext || window.webkitAudioContext;
if (audioContext == null) {
audioContext = new AudioContext();
}
return audioContext;
};
getOfflineAudioContext = function(channels, length, sampleRate) {
var OfflineAudioContext;
OfflineAudioContext = window.OfflineAudioContext || window.webkitOfflineAudioContext;
return new OfflineAudioContext(channels, length, sampleRate);
};
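// SoundEnergyBeatDetector: compares the instantaneous sound energy of each block
// of samples with a threshold derived from the average and variance of the
// previous blocks, and marks a beat whenever the instant energy exceeds it.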
this.SoundEnergyBeatDetector = (function() {
var BEAT_MIN_DISTANCE_SAMPLES, IMPULSE_TRAIN_SIZE, MAX_DISTANCE_MULTIPLIER, MAX_SEARCH_WINDOW_SIZE;
SAMPLE_RATE = 44100;
BEAT_MIN_DISTANCE_SAMPLES = 10;
MAX_DISTANCE_MULTIPLIER = 2;
IMPULSE_TRAIN_SIZE = 108;
MAX_SEARCH_WINDOW_SIZE = 2;
function SoundEnergyBeatDetector() {}
SoundEnergyBeatDetector.prototype.detectBeats = function(
pcmAudioData, previousEnergyVarianceCoefficient, previousAverageEnergyCoefficient, samplesPerInstantEnergy, numberOfPreviousEnergies) {
var c, currentIndex, currentTimeSeconds, distanceBetweenBeatIndexes, distanceInEnergyIndexBetweenBeats, i, instantEnergySum, lastBeatIndex;
var maxCountIndex, meanCount, pcm, previousEnergies, previousEnergiesAverage, previousEnergiesIndex, previousEnergiesSum, previousEnergiesVariance;
var previousEnergy, sumOfDifferencesFromAverage, threshold, v, _i, _j, _k, _len, _len1, _len2, _ref;
this.maximumEnergies = [];
distanceInEnergyIndexBetweenBeats = [];
lastBeatIndex = 0;
this.energies = [];
this.averageEnergies = [];
this.maxEnergy = 0;
previousEnergies = [];
previousEnergiesIndex = 0;
instantEnergySum = 0;
for (i = _i = 0, _len = pcmAudioData.length; _i < _len; i = ++_i) {
pcm = pcmAudioData[i];
instantEnergySum += Math.pow(pcm, 2);
if (i % samplesPerInstantEnergy !== 0) {
continue;
}
if (instantEnergySum > this.maxEnergy) {
this.maxEnergy = instantEnergySum;
}
currentTimeSeconds = i / SAMPLE_RATE;
this.energies.push([currentTimeSeconds, instantEnergySum]);
if (previousEnergies.length < numberOfPreviousEnergies) {
previousEnergies.push(instantEnergySum);
} else {
previousEnergiesSum = 0;
for (_j = 0, _len1 = previousEnergies.length; _j < _len1; _j++) {
previousEnergy = previousEnergies[_j];
previousEnergiesSum += previousEnergy;
}
previousEnergiesAverage = previousEnergiesSum / numberOfPreviousEnergies;
sumOfDifferencesFromAverage = 0;
for (_k = 0, _len2 = previousEnergies.length; _k < _len2; _k++) {
previousEnergy = previousEnergies[_k];
sumOfDifferencesFromAverage += Math.pow(previousEnergy - previousEnergiesAverage, 2);
}
previousEnergiesVariance = sumOfDifferencesFromAverage / numberOfPreviousEnergies;
v = previousEnergiesVariance * previousEnergyVarianceCoefficient;
c = v + parseFloat(previousAverageEnergyCoefficient);
threshold = c * previousEnergiesAverage;
this.averageEnergies.push([currentTimeSeconds, threshold]);
if (instantEnergySum > threshold) {
currentIndex = this.averageEnergies.length - 1;
distanceBetweenBeatIndexes = currentIndex - lastBeatIndex;
if (distanceBetweenBeatIndexes > BEAT_MIN_DISTANCE_SAMPLES) {
lastBeatIndex = currentIndex;
this.maximumEnergies.push(currentTimeSeconds);
distanceInEnergyIndexBetweenBeats.push(distanceBetweenBeatIndexes);
}
}
previousEnergies.splice(previousEnergiesIndex, 1, instantEnergySum);
}
previousEnergiesIndex++;
if (previousEnergiesIndex >= numberOfPreviousEnergies) {
previousEnergiesIndex = 0;
}
instantEnergySum = 0;
}
_ref = this._calculateTempo(distanceInEnergyIndexBetweenBeats, numberOfPreviousEnergies, samplesPerInstantEnergy), meanCount = _ref[0], maxCountIndex = _ref[1];
return this._calculateConvolution(meanCount, maxCountIndex);
};
SoundEnergyBeatDetector.prototype._calculateTempo = function(distanceInEnergyIndexBetweenBeats, numberOfPreviousEnergies, samplesPerInstantEnergy) {
var a, b, beatDistanceCount, beatDistanceCounts, distance, divisor, i, maxCountIndex, maxCountSoFar, maxDistanceBetwenBeats, meanCount, neighbourCount, neighbourIndex, _i, _j, _k, _len, _len1;
maxDistanceBetwenBeats = numberOfPreviousEnergies * MAX_DISTANCE_MULTIPLIER;
beatDistanceCounts = [];
for (i = _i = 0; 0 <= maxDistanceBetwenBeats ? _i <= maxDistanceBetwenBeats : _i >= maxDistanceBetwenBeats; i = 0 <= maxDistanceBetwenBeats ? ++_i : --_i) {
beatDistanceCounts.push(0);
}
for (_j = 0, _len = distanceInEnergyIndexBetweenBeats.length; _j < _len; _j++) {
distance = distanceInEnergyIndexBetweenBeats[_j];
if (distance < maxDistanceBetwenBeats) {
beatDistanceCounts[distance]++;
}
}
maxCountIndex = 0;
maxCountSoFar = 0;
for (i = _k = 0, _len1 = beatDistanceCounts.length; _k < _len1; i = ++_k) {
beatDistanceCount = beatDistanceCounts[i];
if (beatDistanceCount > maxCountSoFar) {
maxCountSoFar = beatDistanceCount;
maxCountIndex = i;
}
}
if (maxCountIndex === beatDistanceCounts.length - 1) {
neighbourIndex = maxCountIndex - 1;
} else if (maxCountIndex === 0) {
neighbourIndex = maxCountIndex + 1;
} else {
a = maxCountIndex - 1;
b = maxCountIndex + 1;
if (beatDistanceCounts[a] > beatDistanceCounts[b]) {
neighbourIndex = a;
} else {
neighbourIndex = b;
}
}
neighbourCount = beatDistanceCounts[neighbourIndex];
divisor = maxCountSoFar + neighbourCount;
if (divisor === 0) {
meanCount = 0;
} else {
meanCount = (maxCountIndex * maxCountSoFar + neighbourIndex * neighbourCount) / divisor;
}
this.bpm = 60 / (meanCount * (samplesPerInstantEnergy / SAMPLE_RATE));
return [meanCount, maxCountIndex];
};
SoundEnergyBeatDetector.prototype._calculateConvolution = function(meanCount, maxCountIndex) {
var b, beatsConvolution, conv, currentConv, espace, i, impulseTrain, j, localMaxPosition, maxConv, maxConvIndex, offsetIndexLeft, offsetIndexRight, ratio, searchForMaxInWindow, _i, _j, _k, _l, _len, _len1, _m, _ref, _ref1, _results;
impulseTrain = [];
espace = 0;
impulseTrain.push(1);
for (i = _i = 1; 1 <= IMPULSE_TRAIN_SIZE ? _i <= IMPULSE_TRAIN_SIZE : _i >= IMPULSE_TRAIN_SIZE; i = 1 <= IMPULSE_TRAIN_SIZE ? ++_i : --_i) {
if (espace >= meanCount) {
impulseTrain.push(1);
espace -= meanCount;
} else {
impulseTrain.push(0);
}
espace += 1;
}
beatsConvolution = [];
this.convolution = [];
maxConv = 0;
maxConvIndex = 0;
for (i = _j = 0, _ref = this.averageEnergies.length - IMPULSE_TRAIN_SIZE - 1; 0 <= _ref ? _j <= _ref : _j >= _ref; i = 0 <= _ref ? ++_j : --_j) {
beatsConvolution[i] = 0;
this.convolution[i] = [this.averageEnergies[i][0], 0];
for (j = _k = 0; 0 <= IMPULSE_TRAIN_SIZE ? _k <= IMPULSE_TRAIN_SIZE : _k >= IMPULSE_TRAIN_SIZE; j = 0 <= IMPULSE_TRAIN_SIZE ? ++_k : --_k) {
this.convolution[i][1] += this.averageEnergies[i + j][1] * impulseTrain[j];
}
currentConv = Math.abs(this.convolution[i][1]);
if (currentConv > maxConv) {
maxConv = currentConv;
maxConvIndex = i;
}
}
ratio = 1 / maxConv;
_ref1 = this.convolution;
for (_l = 0, _len = _ref1.length; _l < _len; _l++) {
conv = _ref1[_l];
conv[1] *= ratio;
}
searchForMaxInWindow = (function(_this) {
return function(offset) {
var maxIndex, maxSoFar, _m, _ref2, _ref3;
maxSoFar = 0;
maxIndex = offset;
for (i = _m = _ref2 = offset - MAX_SEARCH_WINDOW_SIZE, _ref3 = offset + MAX_SEARCH_WINDOW_SIZE; _ref2 <= _ref3 ? _m <= _ref3 : _m >= _ref3; i = _ref2 <= _ref3 ? ++_m : --_m) {
if (i < 0) {
continue;
}
if (i >= _this.convolution.length) {
break;
}
conv = _this.convolution[i][1];
if (conv > maxSoFar) {
maxSoFar = conv;
maxIndex = i;
}
}
return maxIndex;
};
})(this);
beatsConvolution[maxConvIndex] = 1;
offsetIndexRight = maxConvIndex + maxCountIndex;
while (offsetIndexRight < this.convolution.length && this.convolution[offsetIndexRight][1] > 0) {
localMaxPosition = searchForMaxInWindow(offsetIndexRight);
beatsConvolution[localMaxPosition] = 1;
offsetIndexRight = localMaxPosition + maxCountIndex;
}
offsetIndexLeft = maxConvIndex - maxCountIndex;
while (offsetIndexLeft > 0) {
localMaxPosition = searchForMaxInWindow(offsetIndexLeft);
beatsConvolution[localMaxPosition] = 1;
offsetIndexLeft = localMaxPosition - maxCountIndex;
}
this.beats = [];
_results = [];
for (i = _m = 0, _len1 = beatsConvolution.length; _m < _len1; i = ++_m) {
b = beatsConvolution[i];
if (b > 0) {
_results.push(this.beats.push(this.convolution[i][0]));
} else {
_results.push(void 0);
}
}
return _results;
};
return SoundEnergyBeatDetector;
})();

loadAudioFromUrl = function(url, callback) {
var request;
request = new XMLHttpRequest;
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = function() {
return callback(request.response);
};
return request.send();
};
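// AbstractAudioSample: base class that holds a decoded AudioBuffer and plays it
// through a GainNode into the destination of the given context.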
this.AbstractAudioSample = (function() {
function AbstractAudioSample() {
this.playing = false;
}
AbstractAudioSample.prototype.loadAudio = function() {
throw 'Load Audio must be implemented by subclass';
};
AbstractAudioSample.prototype.tryPlay = function(offset, gain) {
var gainNode;
if (this.buffer == null) {
return;
}
this.source = this._ctx.createBufferSource();
this.source.buffer = this.buffer;
gainNode = this._ctx.createGain();
if (gain != null) {
gainNode.gain.value = gain;
}
this.source.connect(gainNode);
gainNode.connect(this._ctx.destination);
if ($.isNumeric(offset)) {
this.source.start(0, offset);
} else {
this.source.start(0);
}
return this.playing = true;
};
AbstractAudioSample.prototype.stop = function() {
if (this.source == null) {
return;
}
this.source.stop(0);
return this.playing = false;
};
return AbstractAudioSample;
})();
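// ArrayBufferAudioSample: wraps an ArrayBuffer and decodes it with
// decodeAudioData on the supplied context - this is where the reported error is thrown.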
this.ArrayBufferAudioSample = (function(_super) {
__extends(ArrayBufferAudioSample, _super);
function ArrayBufferAudioSample(_at_arrayBuffer) {
this.arrayBuffer = _at_arrayBuffer;
ArrayBufferAudioSample.__super__.constructor.apply(this, arguments);
}
ArrayBufferAudioSample.prototype.loadAudio = function(_at__ctx, callback) {
this._ctx = _at__ctx;
return this._ctx.decodeAudioData(this.arrayBuffer, (function(_this) {
return function(buffer) {
_this.buffer = buffer;
if (callback != null) {
return callback(_this);
}
};
})(this));
};
return ArrayBufferAudioSample;
})(AbstractAudioSample);
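// PcmAudioGenerator: renders the sample through an OfflineAudioContext and
// passes the raw PCM data of the first channel to the callback.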
this.PcmAudioGenerator = (function() {
function PcmAudioGenerator() {}
PcmAudioGenerator.prototype.getPcmAudioData = function(offlineContext, audioSample, callback) {
var renderAudioSampleOffline;
renderAudioSampleOffline = (function(_this) {
return function(audioSample) {
offlineContext.oncomplete = function(event) {
return callback(event.renderedBuffer.getChannelData(0));
};
audioSample.tryPlay();
return offlineContext.startRendering();
};
})(this);
return audioSample.loadAudio(offlineContext, renderAudioSampleOffline);
};
return PcmAudioGenerator;
})();
updateSongPlace = function(fractionThroughSong) {
if (audioSample.playing) {
audioSample.stop();
}
trackStartTime = windowStart + ((windowEnd - windowStart) * fractionThroughSong);
return timeline.render(trackStartTime, windowStart, windowEnd);
};
updateVisualisation = function() {
beatDetectorVisualisation.render(beatDetector, windowStart, windowEnd);
return timeline.render(trackStartTime, windowStart, windowEnd);
};
updateBeats = function(pcmAudioData) {
var nOPS, pAEC, pEVC, sPIE;
pAEC = THRESHOLD_CONSTANT;
pEVC = VARIANCE_COEFFICIENT;
sPIE = SAMPLES_PER_INSTANT_ENERGY;
nOPS = NUMBER_OF_PREVIOUS_SAMPLES;
beatDetector = new SoundEnergyBeatDetector();
beatDetector.detectBeats(pcmAudioData, pEVC, pAEC, sPIE, nOPS);
if (windowEnd == null) {
windowEnd = sampleLengthSeconds;
}
console.log("bpm is " + beatDetector.bpm);
$('#bpm').text((beatDetector.bpm.toFixed(2)) + "bpm");
return updateVisualisation();
};
updateAudioFromPcmData = function(pcmAudioData) {
var waveformVisualisation;
waveformVisualisation = new WaveformVisualisation('#waveform', pcmAudioData);
waveformVisualisation.render();
return updateBeats(pcmAudioData);
};
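// updateAudioFromArrayBuffer: decodes the same ArrayBuffer twice - once on the
// normal AudioContext and once, via a second ArrayBufferAudioSample, on an
// OfflineAudioContext to obtain the raw PCM data for the beat detection.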
updateAudioFromArrayBuffer = function(arrayBuffer) {
audioContext = getAudioContext();
audioSample = new ArrayBufferAudioSample(arrayBuffer);
return audioSample.loadAudio(audioContext, function(audioSample) {
var length, offlineAudioContext, pcmAudioGenerator, pcmAudioSample;
pcmAudioSample = new ArrayBufferAudioSample(arrayBuffer);
length = audioSample.buffer.length;
sampleLengthSeconds = length / SAMPLE_RATE;
offlineAudioContext = getOfflineAudioContext(CHANNELS, length, SAMPLE_RATE);
pcmAudioGenerator = new PcmAudioGenerator();
return pcmAudioGenerator.getPcmAudioData(offlineAudioContext, pcmAudioSample, updateAudioFromPcmData);
});
};

}).call(this); 

I'm getting the following error:

The argument to AudioContext.decodeAudioData can't be a detached buffer

on this line:

return this._ctx.decodeAudioData(this.arrayBuffer, (function(_this) {

I've tried to shorten the code as much as possible without removing anything important, but feel free to edit it.

The problem is that you can't use the same ArrayBuffer with decodeAudioData() twice. You can think of it as decodeAudioData() consuming the ArrayBuffer, which means it is gone afterwards. What actually happens is that the Web Audio API transfers the ArrayBuffer to another thread behind the scenes, which detaches it in the calling thread – hence the error message.
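To make the detaching visible, here is a minimal sketch (assuming the promise-based form of decodeAudioData and an mp3 at media/song.mp3, as in the question):

async function demonstrateDetach() {
  const ctx = new AudioContext();
  const arrayBuffer = await (await fetch('media/song.mp3')).arrayBuffer();

  console.log(arrayBuffer.byteLength);    // the size of the file in bytes
  await ctx.decodeAudioData(arrayBuffer); // works, but detaches arrayBuffer
  console.log(arrayBuffer.byteLength);    // 0 – the buffer is now detached
  await ctx.decodeAudioData(arrayBuffer); // rejects with the error from the question
}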

A simple solution is to copy the ArrayBuffer before decoding it:

new ArrayBufferAudioSample(arrayBuffer.slice(0));
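In the code from the question the same arrayBuffer is handed to two ArrayBufferAudioSample instances, and the first decodeAudioData call detaches it before the second one runs. A sketch of how updateAudioFromArrayBuffer could apply the copy (only the slice is new; arrayBufferCopy is just an illustrative name):

updateAudioFromArrayBuffer = function(arrayBuffer) {
  // Copy the buffer *before* the first decode detaches the original.
  var arrayBufferCopy = arrayBuffer.slice(0);
  audioContext = getAudioContext();
  audioSample = new ArrayBufferAudioSample(arrayBuffer);
  return audioSample.loadAudio(audioContext, function(audioSample) {
    var length, offlineAudioContext, pcmAudioGenerator, pcmAudioSample;
    // The offline decode now gets its own, still attached copy.
    pcmAudioSample = new ArrayBufferAudioSample(arrayBufferCopy);
    length = audioSample.buffer.length;
    sampleLengthSeconds = length / SAMPLE_RATE;
    offlineAudioContext = getOfflineAudioContext(CHANNELS, length, SAMPLE_RATE);
    pcmAudioGenerator = new PcmAudioGenerator();
    return pcmAudioGenerator.getPcmAudioData(offlineAudioContext, pcmAudioSample, updateAudioFromPcmData);
  });
};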

By the way, I've built a simple beat detector called web-audio-beat-detector. Maybe it's useful for you. Apart from that there is also essentia.js, which comes with a more sophisticated beat detection.
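For completeness, a minimal sketch of what running web-audio-beat-detector on an already decoded AudioBuffer could look like (assuming the package's promise-based analyze() export):

import { analyze } from 'web-audio-beat-detector';

const audioContext = new AudioContext();

fetch('media/song.mp3')
  .then((response) => response.arrayBuffer())
  .then((arrayBuffer) => audioContext.decodeAudioData(arrayBuffer))
  .then((audioBuffer) => analyze(audioBuffer)) // resolves with the estimated tempo
  .then((tempo) => console.log('bpm is ' + tempo))
  .catch((err) => console.error(err));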
