Source: AudioGraphProcessor.js

const DEBUG = false;

/**
 * AudioGraphProcessor class.
 * Acts as a bridge between JS and WASM audio processing and simplifies running AudioGraph instances.
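 *
 * @example
 * // Sketch of the intended lifecycle. How the WASMJSAPI instance and the AudioGraph
 * // are created is application-specific and assumed here.
 * const graphProcessor = new AudioGraphProcessor(wasmJSAPI, [2], [2], 128, 48000);
 * // Inside AudioWorkletProcessor.process(inputs, outputs):
 * //     graphProcessor.processGraph(inputs, outputs, audioGraph);
 * // On teardown:
 * //     graphProcessor.destruct();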
 */
class AudioGraphProcessor {
    /**
     * AudioGraphProcessor constructor.
     * @param {WASMJSAPI} wasmJSAPI An instance of WASMJSAPI.
     * @param {Array} inputChannelLayout The input channel layout for the audio graph. E.g. [2] means one stereo bus, [2, 1] means a stereo bus and a mono bus.
     * @param {Array} outputChannelLayout The output channel layout for the audio graph. E.g. [2] means one stereo bus, [2, 1] means a stereo bus and a mono bus.
     * @param {Number} maxNumberOfFrames Max number of frames that the graph will be able to process.
     * @param {Number} sampleRate Sample rate.
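     *
     * @example
     * // Layout sketches (assumes `wasmJSAPI` is an already-initialised WASMJSAPI instance):
     * // one stereo input bus and one stereo output bus, 128-frame blocks at 48 kHz
     * const stereoProcessor = new AudioGraphProcessor(wasmJSAPI, [2], [2], 128, 48000);
     * // a stereo bus plus a mono bus in, a single stereo bus out
     * const multiBusProcessor = new AudioGraphProcessor(wasmJSAPI, [2, 1], [2], 128, 48000);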
     */
    constructor(wasmJSAPI, inputChannelLayout, outputChannelLayout, maxNumberOfFrames, sampleRate) {
        if (DEBUG) console.log("[SB] [AudioGraphProcessor] constructor: inputChannelLayout=" + inputChannelLayout + " outputChannelLayout=" + outputChannelLayout + " maxNumberOfFrames=" + maxNumberOfFrames + " sampleRate=" + sampleRate);
        if (DEBUG) console.log("[SB] [AudioGraphProcessor] inputChannelLayout", inputChannelLayout);
        if (DEBUG) console.log("[SB] [AudioGraphProcessor] outputChannelLayout", outputChannelLayout);
        if (!Array.isArray(inputChannelLayout)) {
            throw new TypeError("[SB] [AudioGraphProcessor] inputChannelLayout is not an array");
        }
        if (!Array.isArray(outputChannelLayout)) {
            throw new TypeError("[SB] [AudioGraphProcessor] outputChannelLayout is not an array");
        }

        this.wasmJSAPI = wasmJSAPI;
        this.inputChannelLayout = inputChannelLayout;
        this.outputChannelLayout = outputChannelLayout;
        this.maxNumberOfFrames = maxNumberOfFrames;
        this.sampleRate = sampleRate;

        const numberOfInputBuses = inputChannelLayout.length;
        const numberOfOutputBuses = outputChannelLayout.length;

        // Input bus processing
        const inputAudioBusList = new wasmJSAPI.classes.AudioBusVector(numberOfInputBuses);
        this.inputAudioDataArray = [];
        this.inputAudioBuffers = [];
        this.inputViews = [];
        for (let busIndex = 0; busIndex < numberOfInputBuses; busIndex++) {
            const numberOfChannels = inputChannelLayout[busIndex];
            const inputAudioData = new wasmJSAPI.classes.AudioDataFloat(numberOfChannels, maxNumberOfFrames);
            const inputAudioBuffer = new wasmJSAPI.classes.AudioBufferFloat(numberOfChannels, maxNumberOfFrames, false, sampleRate, inputAudioData.getBuffers());

            const busInputViews = [];
            for (let chIndex = 0; chIndex < numberOfChannels; chIndex++) {
                busInputViews[chIndex] = new Float32Array(wasmJSAPI.sdkAPI.memory, inputAudioBuffer.getReadPointer(chIndex), maxNumberOfFrames);
            }

            const inputAudioBus = wasmJSAPI.classes.AudioBus.createWithPtr(inputAudioBusList.getBus(busIndex));
            inputAudioBus.setBuffer(inputAudioBuffer.wasmMemAddress);
            this.inputViews.push(busInputViews);
            this.inputAudioDataArray.push(inputAudioData);
            this.inputAudioBuffers.push(inputAudioBuffer);
        }

        // Output bus processing
        const outputAudioBusList = new wasmJSAPI.classes.AudioBusVector(numberOfOutputBuses);
        this.outputAudioDataArray = [];
        this.outputAudioBuffers = [];
        this.outputViews = [];
        for (let outBusIndex = 0; outBusIndex < numberOfOutputBuses; outBusIndex++) {
            const numberOfChannels = outputChannelLayout[outBusIndex];
            const outputAudioData = new wasmJSAPI.classes.AudioDataFloat(numberOfChannels, maxNumberOfFrames);
            const outputAudioBuffer = new wasmJSAPI.classes.AudioBufferFloat(numberOfChannels, maxNumberOfFrames, false, sampleRate, outputAudioData.getBuffers());

            const busOutputViews = [];
            for (let outputChIndex = 0; outputChIndex < numberOfChannels; outputChIndex++) {
                busOutputViews[outputChIndex] = new Float32Array(wasmJSAPI.sdkAPI.memory, outputAudioBuffer.getReadPointer(outputChIndex), maxNumberOfFrames);
            }

            const outputAudioBus = wasmJSAPI.classes.AudioBus.createWithPtr(outputAudioBusList.getBus(outBusIndex));
            outputAudioBus.setBuffer(outputAudioBuffer.wasmMemAddress);
            this.outputViews.push(busOutputViews);
            this.outputAudioDataArray.push(outputAudioData);
            this.outputAudioBuffers.push(outputAudioBuffer);
        }

        this.inputAudioBusList = inputAudioBusList;
        this.outputAudioBusList = outputAudioBusList;
    }

    /**
     * AudioGraphProcessor destructor. Deallocates all WASM-side objects owned by this processor.
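     *
     * @example
     * // Sketch: call once the graph is no longer being processed, e.g. when the owning
     * // AudioWorkletProcessor is torn down, so the WASM-side objects are released.
     * graphProcessor.destruct();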
     */
    destruct() {
        if (DEBUG) console.log("[SB] [AudioGraphProcessor] destruct");
        this.inputAudioBusList.destruct();
        this.outputAudioBusList.destruct();
        this.inputAudioBuffers.forEach(item => item.destruct());
        this.outputAudioBuffers.forEach(item => item.destruct());
        this.inputAudioDataArray.forEach(item => item.destruct());
        this.outputAudioDataArray.forEach(item => item.destruct());
    }

    /**
     * Processes one block of audio through the given AudioGraph. Intended to be called
     * from an AudioWorkletProcessor's process() callback.
     * @param {Array} inputs Audio inputs, as passed to AudioWorkletProcessor.process().
     * @param {Array} outputs Audio outputs, as passed to AudioWorkletProcessor.process().
     * @param {AudioGraph} audioGraph The audio graph to process.
     * @returns {Boolean} True if the graph was processed successfully, false otherwise.
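     *
     * @example
     * // Sketch of the intended call site. Assumes an AudioWorkletProcessor whose bus and
     * // channel layout matches this processor's, and that `this.graphProcessor` and
     * // `this.audioGraph` were created during node setup.
     * class GraphWorkletProcessor extends AudioWorkletProcessor {
     *     process(inputs, outputs) {
     *         return this.graphProcessor.processGraph(inputs, outputs, this.audioGraph);
     *     }
     * }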
     */
    processGraph(inputs, outputs, audioGraph) {

        if (inputs.length !== this.inputChannelLayout.length) {
            console.warn("[SB] [AudioGraphProcessor] Number of input buses (" + inputs.length + ") does not match graph input layout (" + this.inputChannelLayout.length + ")");
        }
        if (outputs.length !== this.outputChannelLayout.length) {
            console.warn("[SB] [AudioGraphProcessor] Number of output buses (" + outputs.length + ") does not match graph output layout (" + this.outputChannelLayout.length + ")");
        }

        // Copy input data TO wasm memory (limited to the buses/channels present in both
        // the callback data and the graph layout, so mismatches reported above cannot
        // cause out-of-range indexing)
        const inputBusCount = Math.min(inputs.length, this.inputViews.length);
        for (let busIndex = 0; busIndex < inputBusCount; busIndex++) {
            const numberOfChannels = inputs[busIndex].length;
            if (numberOfChannels !== this.inputChannelLayout[busIndex]) {
                console.error("[SB] [AudioGraphProcessor] Number of input channels does not match at bus " + busIndex + " (expected " + this.inputChannelLayout[busIndex] + ", got " + numberOfChannels + ")");
            }
            const channelCount = Math.min(numberOfChannels, this.inputViews[busIndex].length);
            for (let channelIndex = 0; channelIndex < channelCount; channelIndex++) {
                this.inputViews[busIndex][channelIndex].set(inputs[busIndex][channelIndex]);
            }
        }

        // Run the audio graph in WebAssembly
        audioGraph.process(this.inputAudioBusList.wasmMemAddress, this.outputAudioBusList.wasmMemAddress);

        // Copy output data FROM wasm memory (again limited to the buses/channels present
        // on both sides)
        const outputBusCount = Math.min(outputs.length, this.outputViews.length);
        for (let busIndex = 0; busIndex < outputBusCount; busIndex++) {
            const numberOfChannels = outputs[busIndex].length;
            if (numberOfChannels !== this.outputChannelLayout[busIndex]) {
                console.error("[SB] [AudioGraphProcessor] Number of output channels does not match at bus " + busIndex + " (expected " + this.outputChannelLayout[busIndex] + ", got " + numberOfChannels + ")");
            }
            const channelCount = Math.min(numberOfChannels, this.outputViews[busIndex].length);
            for (let outputChannelIndex = 0; outputChannelIndex < channelCount; outputChannelIndex++) {
                outputs[busIndex][outputChannelIndex].set(this.outputViews[busIndex][outputChannelIndex]);
            }
        }

        return true;
    }

}

export default AudioGraphProcessor;