I'm working on an audio/video application that uses WebAssembly, and I'm running into problems with sound playback. I capture microphone audio via getUserMedia, and I've built a proof of concept that passes the captured audio along as raw PCM. The audio is sent to the main thread via postMessage, then forwarded to a second audio worklet through a direct connection for playback. The chain works and audio chunks are received, but the sound is distorted by crackling, making it inaudible.
Main thread JavaScript:
Code:
<!DOCTYPE html>
<html lang="en">
<head>
<title>Audio worklet capture/renderer</title>
</head>
<body>
<h2>Audio worklet</h2>
<img alt="" src="HP.gif">
<button id="cmdStart"></button>
<button id="cmdStop" disabled></button>
<script>
const cmdStart = document.getElementById('cmdStart');
const cmdStop = document.getElementById('cmdStop');
let hasUserGesture = false;
let FRate = 48000;
let captureNode;
let rendererNode;
let mediaStream;
let audioContext;
async function loadWorklets() {
  await audioContext.audioWorklet.addModule('AudioWorkletProcessor-capture.js');
  console.log('AudioWorkletProcessor-capture loaded');
  await audioContext.audioWorklet.addModule('AudioWorkletProcessor-renderer.js');
  console.log('AudioWorkletProcessor-renderer loaded');
}
function sendPCMBuffer(pcmData) {
  // Create an AudioBuffer from the PCM data
  const audioBuffer = audioContext.createBuffer(1, pcmData.length, audioContext.sampleRate);
  audioBuffer.copyToChannel(pcmData, 0);
  // Create an AudioBufferSourceNode and connect it to the audio worklet
  const source = audioContext.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(rendererNode);
  source.start(); // start() with no argument begins playback immediately, not back to back with the previous chunk
}
cmdStop.textContent = "Stop";
cmdStop.disabled = true;
cmdStop.addEventListener('click', stopAudio);
cmdStart.textContent = "Start";
cmdStart.addEventListener('click', startAudio);
async function startAudio() {
  console.log('Start button clicked');
  hasUserGesture = true;
  cmdStart.disabled = true;
  cmdStop.disabled = false;
  if (!audioContext) audioContext = new AudioContext(); /* { latencyHint: 'high', sampleRate: 44100 } */
  // Check if AudioContext is suspended and resume if necessary
  if (audioContext.state === 'suspended' && hasUserGesture) {
    await audioContext.resume();
    console.log('AudioContext resumed');
  }
  // Load the worklets before creating the nodes
  await loadWorklets();
  rendererNode = new AudioWorkletNode(audioContext, 'audioworklet-audio-renderer');
  rendererNode.port.start();
  captureNode = new AudioWorkletNode(audioContext, 'audioworklet-audio-capture');
  if (captureNode) {
    if (rendererNode) {
      rendererNode.port.postMessage(1);
    }
    captureNode.port.onmessage = (event) => {
      console.log(`Received data from capture processor: ${event.data}`);
      if (rendererNode) {
        console.log(typeof event.data);
        const audio_chunk = event.data;
        //rendererNode.port.postMessage(audio_chunk.slice(0));
        if ((audio_chunk.length > 0) && (audio_chunk[0].length > 0)) {
          for (let i = 0; i < audio_chunk[0].length; i++) {
            const fa = audio_chunk[0][i];
            sendPCMBuffer(fa);
          }
        }
        //rendererNode.port.postMessage(event.data);
      }
    };
    captureNode.port.start();
  } else {
    console.error('Error: captureNode is undefined');
  }
  mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const sourceNode = audioContext.createMediaStreamSource(mediaStream);
  sourceNode.connect(captureNode);
  captureNode.connect(rendererNode);
  rendererNode.connect(audioContext.destination);
}
function stopAudio() {
  cmdStart.disabled = false;
  cmdStop.disabled = true;
  if (captureNode) {
    captureNode.port.postMessage('stop');
    captureNode = undefined; // Release the reference for garbage collection
  }
  if (rendererNode) {
    rendererNode.port.postMessage('stop');
    rendererNode = undefined; // Release the reference for garbage collection
  }
  // Stop each track individually
  const tracks = mediaStream.getTracks();
  tracks.forEach(track => { track.stop(); });
  audioContext.suspend();
}
</script>
</body>
</html>
Audio worklet used for capture (AudioWorkletProcessor-capture.js):
Code:
class AudioWorkletProcessorRawPCMCapture extends AudioWorkletProcessor
{
  constructor()
  {
    super();
    this.shouldProcess = true;
    this.port.onmessage = (event) => {
      if (event.data === 'stop') {
        this.shouldProcess = false;
      }
    };
  }
  process(inputs, outputs, parameters)
  {
    if (!this.shouldProcess) {
      return false;
    }
    const input = inputs[0];
    if (input && input.length > 0) { // inputs itself is always truthy; check the first input's channels instead
      this.port.postMessage(inputs.slice(0)); // Slice is to avoid detached buffer!
    }
    return true;
  }
}
registerProcessor('audioworklet-audio-capture', AudioWorkletProcessorRawPCMCapture);
registerProcessor('audioworklet-audio-capture', AudioWorkletProcessorRawPCMCapture);
Audio worklet used for audio rendering (AudioWorkletProcessor-renderer.js):
Code:
class AudioWorkletProcessorRawPCMRenderer extends AudioWorkletProcessor
{
  constructor()
  {
    super();
    this.shouldProcess = true;
    this.chunkQueue = [];
    this.gain = 1.0; // Initialize gain to 1.0 (no gain)
    // Listen for messages containing PCM audio chunks from the main thread
    this.port.onmessage = (event) => {
      if (event.data) {
        if (typeof event.data === 'number') {
          console.log(`AudioWorkletProcessorRawPCMRenderer / Receiving new audio gain : ${event.data} `);
          this.gain = event.data; // Update gain if a number is received
        } else if (typeof event.data === 'string') {
          if (event.data === 'stop') {
            console.log('AudioWorkletProcessorRawPCMRenderer / Stopping audio processing');
            this.shouldProcess = false; // disable the audio worklet
          }
        }
      }
    };
  }
  process(inputs, outputs, parameters)
  {
    if (!this.shouldProcess) {
      return false; // Stop the audio processor
    }
    if (!inputs || inputs.length === 0 || !inputs[0][0]) {
      return true; // Continue processing even if no input
    }
    const inputBuffer = inputs[0][0];
    const outputChannel = outputs[0][0];
    if (inputBuffer.length > 0) {
      console.log("AudioWorkletProcessorRawPCMRenderer / Process the input buffer.");
      outputChannel.set(inputBuffer);
    } /*else {
      outputChannel.fill(0); // Example: Fill with silence
    }*/
    return true; // Continue the audio process
  }
}
registerProcessor('audioworklet-audio-renderer', AudioWorkletProcessorRawPCMRenderer);
Any ideas on how to solve this? Could it be related to real-time processing? The audio chunks received by the renderer audio worklet may be played back too quickly, without waiting for the previous one to finish, which could cause crackling due to overlaps or other issues.
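One way to test the overlap theory: keep the postMessage path, but stop creating an AudioBufferSourceNode per chunk. Each source.start() call begins playback immediately, so chunks that arrive faster than real time (or in bursts, due to postMessage jitter) get mixed on top of each other at the renderer's input. Instead, the renderer worklet could queue the incoming Float32Array chunks and emit exactly 128 frames per process() call, which seems to be what the unused chunkQueue field was heading towards. Below is a minimal sketch, assuming mono audio and that the main thread posts each channel's Float32Array directly (rendererNode.port.postMessage(fa) in place of sendPCMBuffer(fa)); the class name, processor name and file name are hypothetical, not from the code above:

Code:
// queueing-pcm-renderer.js — sketch only: buffers incoming Float32Array
// chunks (assumed mono) and plays them back sequentially, 128 frames per call.
class QueueingPCMRenderer extends AudioWorkletProcessor
{
  constructor()
  {
    super();
    this.queue = [];      // FIFO of Float32Array chunks posted by the main thread
    this.readOffset = 0;  // read position inside the chunk at the head of the queue
    this.port.onmessage = (event) => {
      if (event.data instanceof Float32Array) {
        this.queue.push(event.data); // structured clone: safe to keep a reference
      }
    };
  }
  process(inputs, outputs, parameters)
  {
    const out = outputs[0][0]; // mono output channel, 128 frames per quantum
    let written = 0;
    while (written < out.length && this.queue.length > 0) {
      const chunk = this.queue[0];
      const n = Math.min(out.length - written, chunk.length - this.readOffset);
      out.set(chunk.subarray(this.readOffset, this.readOffset + n), written);
      written += n;
      this.readOffset += n;
      if (this.readOffset >= chunk.length) { // head chunk fully consumed
        this.queue.shift();
        this.readOffset = 0;
      }
    }
    // Frames not written stay at zero (silence) when the queue underruns.
    return true;
  }
}
registerProcessor('queueing-pcm-renderer', QueueingPCMRenderer);

With something like this, the renderer node would be the only thing connected to audioContext.destination, and both the captureNode.connect(rendererNode) wiring and the per-chunk buffer sources would go away. Pre-buffering a few chunks before starting playback should also help absorb the scheduling jitter between the capture side and the render side.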