Audio Processing API Reference

Complete reference for the BeatBlocks Audio Processing API

Overview

The BeatBlocks Audio Processing API provides tools for manipulating, processing, and exporting audio. It includes a professional-grade audio processing chain, encoding/decoding capabilities, and export functionality.

Audio Processing Chain

The BeatBlocks audio processing chain consists of:

  1. Compressor - For dynamic range control
  2. Master Gain - For overall volume control
  3. Stereo Panner - For spatial positioning
  4. Limiter - For preventing clipping
// Default dynamics settings
// Applied to the two DynamicsCompressorNode stages built in
// initMasterProcessingChain(). Per the Web Audio API, threshold and knee
// are in dB, ratio is input-dB per output-dB, attack/release are seconds.
dynamics: {
  compressor: {
    threshold: -12.0, // start compressing above -12 dB
    knee: 12.0,       // wide soft knee for a gentle onset
    ratio: 2.0,       // mild 2:1 dynamic-range control
    attack: 0.003,
    release: 0.25
  },
  limiter: {
    threshold: -3.0,  // catch peaks above -3 dB
    knee: 0.0,        // hard knee
    ratio: 20.0,      // 20:1 -- effectively brick-wall limiting
    attack: 0.003,
    release: 0.25
  }
}

Audio Processing Methods

initMasterProcessingChain()

Initializes the audio processing chain with compressor, gain, and limiter nodes.

Implementation

initMasterProcessingChain() {
  // Create compressor
  this.compressor = this.audioContext.createDynamicsCompressor();
  this.compressor.threshold.value = this.dynamics.compressor.threshold;
  this.compressor.knee.value = this.dynamics.compressor.knee;
  this.compressor.ratio.value = this.dynamics.compressor.ratio;
  this.compressor.attack.value = this.dynamics.compressor.attack;
  this.compressor.release.value = this.dynamics.compressor.release;

  // Create master gain
  this.masterGain = this.audioContext.createGain();
  this.masterGain.gain.value = 1.0;

  // Create stereo panner
  this.stereoPanner = this.audioContext.createStereoPanner();
  this.stereoPanner.pan.value = 0;

  // Create limiter (another compressor with more aggressive settings)
  this.limiter = this.audioContext.createDynamicsCompressor();
  this.limiter.threshold.value = this.dynamics.limiter.threshold;
  this.limiter.knee.value = this.dynamics.limiter.knee;
  this.limiter.ratio.value = this.dynamics.limiter.ratio;
  this.limiter.attack.value = this.dynamics.limiter.attack;
  this.limiter.release.value = this.dynamics.limiter.release;

  // Connect the processing chain
  this.compressor.connect(this.masterGain);
  this.masterGain.connect(this.stereoPanner);
  this.stereoPanner.connect(this.limiter);
  this.limiter.connect(this.audioContext.destination);
}

updateDynamicsSettings(settings)

Updates the dynamics processing settings for the audio chain.

Parameters

  • settings - Object containing compressor and limiter settings

Example

// Apply heavier compression and a tighter limiter at runtime.
// (Same shape as the `dynamics` defaults: dB thresholds/knees, seconds
// for attack/release.)
beatBlock.updateDynamicsSettings({
  compressor: {
    threshold: -20.0,  // Lower threshold for more compression
    knee: 10.0,
    ratio: 4.0,        // Higher ratio for more aggressive compression
    attack: 0.005,
    release: 0.1       // Faster release
  },
  limiter: {
    threshold: -1.0,   // Higher threshold to prevent distortion
    knee: 0.0,
    ratio: 20.0,
    attack: 0.001,     // Very fast attack to catch transients
    release: 0.1
  }
});

Audio Encoding & Decoding

Opus Decoding

BeatBlocks uses the ogg-opus-decoder library to decode Opus audio files for playback.

Implementation

async decodeAudioData(arrayBuffer, path) {
  try {
    // Check if it's an Opus file
    const isOpus = path.toLowerCase().endsWith('.opus');
    
    if (isOpus) {
      // Use Opus decoder
      const decoder = new OpusDecoder();
      await decoder.ready;
      
      // Decode the Opus data
      decoder.decode(new Uint8Array(arrayBuffer));
      const decodedData = decoder.decode_float();
      
      // Create audio buffer from decoded data
      const audioBuffer = this.audioContext.createBuffer(
        1, // Mono channel
        decodedData.length,
        48000 // Opus standard sample rate
      );
      
      // Copy decoded data to audio buffer
      const channelData = audioBuffer.getChannelData(0);
      channelData.set(decodedData);
      
      return audioBuffer;
    } else {
      // Use standard Web Audio API decoding for other formats
      return await this.audioContext.decodeAudioData(arrayBuffer);
    }
  } catch (error) {
    console.error(`Failed to decode audio data: ${error}`);
    throw new Error(`Failed to decode audio data: ${error.message}`);
  }
}

MP3 Encoding

BeatBlocks uses lamejs, a JavaScript port of the LAME MP3 encoder, for exporting compositions as MP3 files.

Implementation

encodeMP3(audioBuffer, options = {}) {
  const { bitRate = 128, sampleRate = 44100 } = options;
  
  // Create MP3 encoder
  const encoder = new lamejs.Mp3Encoder(
    audioBuffer.numberOfChannels,
    sampleRate,
    bitRate
  );
  
  // Convert audio buffer to samples
  const samples = [];
  for (let i = 0; i < audioBuffer.numberOfChannels; i++) {
    samples.push(audioBuffer.getChannelData(i));
  }
  
  // Convert to Int16 format required by LAME
  const int16Samples = [];
  for (let i = 0; i < samples[0].length; i++) {
    for (let channel = 0; channel < audioBuffer.numberOfChannels; channel++) {
      const sample = samples[channel][i];
      const int16Sample = Math.max(-1, Math.min(1, sample)) * 0x7FFF;
      int16Samples.push(int16Sample);
    }
  }
  
  // Encode to MP3
  const mp3Data = encoder.encodeBuffer(new Int16Array(int16Samples));
  const mp3End = encoder.flush();
  
  // Combine all MP3 data
  const mp3Buffer = new Uint8Array(mp3Data.length + mp3End.length);
  mp3Buffer.set(mp3Data);
  mp3Buffer.set(mp3End, mp3Data.length);
  
  return new Blob([mp3Buffer], { type: 'audio/mp3' });
}

Audio Export

downloadSong()

Exports the current composition as a WAV audio file and triggers a browser download.

Returns

Promise<void> - Resolves when the export is complete

Implementation

async downloadSong() {
  try {
    this.emit('info', "Preparing to export song...");
    
    // Create offline audio context for rendering
    const offlineCtx = new OfflineAudioContext({
      numberOfChannels: 2,
      length: 44100 * this.getTotalDuration(),
      sampleRate: 44100
    });
    
    // Recreate the song in the offline context
    await this.renderToOfflineContext(offlineCtx);
    
    // Render the audio
    this.emit('info', "Rendering audio...");
    const renderedBuffer = await offlineCtx.startRendering();
    
    // Encode as WAV
    this.emit('info', "Encoding audio...");
    const wavBlob = this.encodeWAV(renderedBuffer);
    
    // Create download link
    const url = URL.createObjectURL(wavBlob);
    const a = document.createElement('a');
    a.href = url;
    a.download = `${this.details.title || 'beatblock'}.wav`;
    
    // Trigger download
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    URL.revokeObjectURL(url);
    
    this.emit('info', "Download complete!");
  } catch (error) {
    this.emit('error', `Export failed: ${error.message}`);
    throw error;
  }
}

Example

// Add a download button to your UI
const downloadButton = document.getElementById('download-button');
downloadButton.addEventListener('click', async () => {
  // Disable the button while the export runs so it can't be re-triggered.
  downloadButton.disabled = true;
  downloadButton.textContent = 'Exporting...';

  try {
    // Export the song
    await beatBlock.downloadSong();
  } catch (error) {
    console.error('Export failed:', error);
    alert('Failed to export song: ' + error.message);
  } finally {
    // Reset the button exactly once, on both success and failure
    // (the original duplicated this reset on each path).
    downloadButton.disabled = false;
    downloadButton.textContent = 'Download';
  }
});

encodeWAV(audioBuffer)

Encodes an audio buffer as a WAV file.

Parameters

  • audioBuffer - The AudioBuffer to encode

Returns

Blob - A Blob containing the WAV file

Implementation

encodeWAV(audioBuffer) {
  const numChannels = audioBuffer.numberOfChannels;
  const sampleRate = audioBuffer.sampleRate;
  const bitDepth = 16; // 16-bit audio
  const bytesPerSample = bitDepth / 8;
  
  // Interleave channels
  let interleaved;
  if (numChannels === 2) {
    const left = audioBuffer.getChannelData(0);
    const right = audioBuffer.getChannelData(1);
    interleaved = new Float32Array(left.length * 2);
    
    for (let i = 0; i < left.length; i++) {
      interleaved[i * 2] = left[i];
      interleaved[i * 2 + 1] = right[i];
    }
  } else {
    interleaved = audioBuffer.getChannelData(0);
  }
  
  // Create WAV header
  const dataSize = interleaved.length * bytesPerSample;
  const buffer = new ArrayBuffer(44 + dataSize);
  const view = new DataView(buffer);
  
  // Write WAV header
  // "RIFF" chunk descriptor
  this.writeString(view, 0, 'RIFF');
  view.setUint32(4, 36 + dataSize, true);
  this.writeString(view, 8, 'WAVE');
  
  // "fmt " sub-chunk
  this.writeString(view, 12, 'fmt ');
  view.setUint32(16, 16, true); // fmt chunk size
  view.setUint16(20, 1, true); // Audio format (1 = PCM)
  view.setUint16(22, numChannels, true);
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * numChannels * bytesPerSample, true); // Byte rate
  view.setUint16(32, numChannels * bytesPerSample, true); // Block align
  view.setUint16(34, bitDepth, true);
  
  // "data" sub-chunk
  this.writeString(view, 36, 'data');
  view.setUint32(40, dataSize, true);
  
  // Write audio data
  this.floatTo16BitPCM(view, 44, interleaved);
  
  return new Blob([buffer], { type: 'audio/wav' });
}

Audio Visualization

getAudioNode()

Returns the master audio node for connecting to analyzers and visualizers.

Returns

AudioNode - The master gain node

Example

// Create an analyzer for visualization
const analyzer = audioContext.createAnalyser();
analyzer.fftSize = 2048;
analyzer.smoothingTimeConstant = 0.8;

// Connect the BeatBlock output to the analyzer
beatBlock.getAudioNode().connect(analyzer);

// Set up visualization
let visualizerRunning = false;

function visualize() {
  // Guard: 'play' can fire many times, but we only ever want ONE
  // requestAnimationFrame loop -- without this, each play stacks another
  // loop that redraws the same frame and wastes CPU.
  if (visualizerRunning) return;
  visualizerRunning = true;

  // Create data arrays
  const bufferLength = analyzer.frequencyBinCount;
  const dataArray = new Uint8Array(bufferLength);

  // Get a canvas context
  const canvas = document.getElementById('visualizer');
  const canvasCtx = canvas.getContext('2d');

  // Animation function
  function draw() {
    requestAnimationFrame(draw);

    // Get frequency data
    analyzer.getByteFrequencyData(dataArray);

    // Clear canvas
    canvasCtx.clearRect(0, 0, canvas.width, canvas.height);

    // Draw one bar per frequency bin
    const barWidth = (canvas.width / bufferLength) * 2.5;
    let x = 0;

    for (let i = 0; i < bufferLength; i++) {
      const barHeight = dataArray[i] / 2;

      canvasCtx.fillStyle = `rgb(${barHeight + 100}, 50, 50)`;
      canvasCtx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);

      x += barWidth + 1;
    }
  }

  draw();
}

// Start visualization when playback begins
beatBlock.on('play', visualize);