I'm trying to use an audio buffer's PCM data to draw audio waveforms on a canvas (node-canvas) and then write a .png file from that canvas for each frame. What I'm struggling with is the first part: using the PCM data, sample rate, and other attributes correctly, i.e. how to set up a setTimeout/setInterval that produces frames at the appropriate frame rate.
I was able to decode the audio file and get an AudioBuffer object that contains the PCM data, sample rate, etc.:
var AudioContext = require('web-audio-api').AudioContext;
var context = new AudioContext;
var exec = require('child_process').exec;
const fs = require('fs');

function decodeSoundBuffer(buffer, ctx, canvas){
  fs.readFile(__dirname + '/Polo.mp3', function(err, data){
    if (err) throw err;
    context.decodeAudioData(
      data,
      function(audioBuffer) {
        console.log(audioBuffer);
        var pcmdata = audioBuffer.getChannelData(0);  // Float32Array of samples for channel 0
        var samplerate = audioBuffer.sampleRate;      // samples per second, e.g. 44100 or 48000
        playsound('../Polo.mp3');
      },
      function(err) { throw err; }
    );
  });
}

function playsound(soundfile){
  // play the file with ffplay so I can compare the audio against the generated frames
  var create_audio = exec('ffplay -autoexit ' + soundfile, {maxBuffer: 1024 * 500}, function (error, stdout, stderr) {
    if (error !== null) {
      console.log('exec error: ' + error);
    }
  });
}
How can I loop through the pcmdata array and produce frames at a rate that respects the sample rate and the other attributes of the AudioBuffer?
Thank you.
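For context, my current (possibly wrong) understanding is that the sample rate is what ties the PCM array to wall-clock time, so the numbers should relate roughly like this (pcmdata and samplerate as obtained above; the 60 fps target is just my choice):

var fps = 60;                                        // target video frame rate (my choice, not fixed)
var durationSeconds = pcmdata.length / samplerate;   // total length of the audio in seconds
var samplesPerFrame = samplerate / fps;              // samples each frame should cover
var totalFrames = Math.ceil(durationSeconds * fps);  // frames needed so video and audio line up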
[EDIT]: I did manage to produce frames at 60 fps, but the rate doesn't seem right.
How do I know? The audio file is 177 s long and 8310 frames were produced, so 8310 / 60 = 138.5 s of video, which doesn't match the 177 s of audio.
This is the code:
var AudioContext = require('web-audio-api').AudioContext;
var context = new AudioContext;
var exec = require('child_process').exec;
const fs = require('fs');
var { visualize } = require('./visualize.js');

function decodeSoundBuffer(buffer, ctx, canvas){
  fs.readFile(__dirname + '/Polo.mp3', function(err, data){
    if (err) throw err;
    context.decodeAudioData(
      data,
      function(audioBuffer) {
        console.log(audioBuffer);
        var pcmdata = audioBuffer.getChannelData(0);  // Float32Array of samples for channel 0
        var samplerate = audioBuffer.sampleRate;
        visualizeData(pcmdata, ctx, canvas);
        playsound('../Polo.mp3');
      },
      function(err) { throw err; }
    );
  });
}

function playsound(soundfile){
  var create_audio = exec('ffplay -autoexit ' + soundfile, {maxBuffer: 1024 * 500}, function (error, stdout, stderr) {
    if (error !== null) {
      console.log('exec error: ' + error);
    }
  });
}

function visualizeData(pcmdata, ctx, canvas){
  // interval that executes 60 times per second, producing one frame every ~16.66 ms
  var interval = 1000 / 60;
  var index = 0;
  var frameCount = 1;
  var step = 1024;  // samples consumed per frame
  var samplesound = setInterval(function() {
    if (index >= pcmdata.length) {
      clearInterval(samplesound);
      console.log("finished sampling sound");
      return;
    }
    var array = [];
    for (var i = index; i < Math.min(index + step, pcmdata.length); i++) {
      array.push(pcmdata[i] * 200);  // scale the sample so the waveform is visible on the canvas
    }
    visualize(array, ctx, {}, frameCount, canvas);
    frameCount++;
    index += step;
  }, interval);
}

module.exports = {
  decodeSoundBuffer,
};
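If the sample rate is 48 kHz (an assumption on my part, I haven't double-checked it), the frame count looks like it comes from the hard-coded step of 1024 samples rather than from the 60 fps interval: 177 s × 48000 ≈ 8,496,000 samples, and 8,496,000 / 1024 ≈ 8297, which is very close to the 8310 frames I got. The interval only controls how fast the loop runs in real time, not how many frames come out. One idea I'm considering, but am not sure is correct, is to derive the step from the sample rate (passing samplerate into the function, which my current code doesn't do), so that each frame covers exactly 1/fps seconds of audio:

function visualizeData(pcmdata, ctx, canvas, samplerate){
  var fps = 60;                              // target video frame rate
  var step = Math.round(samplerate / fps);   // samples per frame, e.g. 48000 / 60 = 800
  var index = 0;
  var frameCount = 1;
  var samplesound = setInterval(function() {
    if (index >= pcmdata.length) {
      clearInterval(samplesound);
      console.log("finished sampling sound");
      return;
    }
    var array = [];
    for (var i = index; i < Math.min(index + step, pcmdata.length); i++) {
      array.push(pcmdata[i] * 200);
    }
    visualize(array, ctx, {}, frameCount, canvas);
    frameCount++;
    index += step;
  }, 1000 / fps);
}

Is deriving the step that way the right approach, or is there a better way to tie the frames to the sample rate?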
I need the rate at which the frames are produced to match the rate of the audio, because I'm planning to combine them with ffmpeg and create a video.
Thanks.
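For that last step, this is roughly the command I have in mind (file names, the frame pattern, and the 60 fps value are placeholders, not final), run from Node the same way I run ffplay above:

// Mux the generated frames with the original audio into a video.
// 'frames/frame%d.png' and 'out.mp4' are placeholder paths.
var exec = require('child_process').exec;
exec('ffmpeg -framerate 60 -start_number 1 -i frames/frame%d.png -i Polo.mp3 -c:v libx264 -pix_fmt yuv420p -shortest out.mp4',
  function (error, stdout, stderr) {
    if (error !== null) {
      console.log('exec error: ' + error);
    }
  });

If the frames aren't produced at a rate that matches the audio, the video either ends early or gets cut off by -shortest, which is exactly what I'm trying to avoid.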