Music Visualizer

Real-time frequency spectrum visualization with interactive audio controls.

What this code does

- Web Audio API: creates audio context and analyser node for real-time frequency analysis.
- FFT Analysis: analyser.getByteFrequencyData() provides frequency spectrum data each frame.
- Frequency Bars: 64 animated bars represent different frequency ranges from low to high.
- Demo Audio: oscillators generate harmonic content for visualization; the listing below keeps it to a single 440 Hz sine tone.
- Color Modes: rainbow, blue, red, green color schemes for frequency bars.
- Interactive Controls: sensitivity, color mode, rotation speed, and audio start/stop (color modes and controls are sketched after the main listing below).
- Responsive Animation: bar heights and colors respond to audio amplitude and frequency.

JavaScript (plain)

// Music Visualizer with Web Audio API
import * as THREE from 'three' // or load three.js as a global via a <script> tag

const scene = new THREE.Scene()
scene.background = new THREE.Color(0x0a0a0a)

const width = window.innerWidth
const height = window.innerHeight

const camera = new THREE.PerspectiveCamera(75, width / height, 0.1, 1000)
camera.position.set(0, 5, 15)
camera.lookAt(0, 0, 0)

const renderer = new THREE.WebGLRenderer({ antialias: true })
renderer.setSize(width, height)

document.querySelector('#app').appendChild(renderer.domElement)

// Audio setup
const audioContext = new (window.AudioContext || window.webkitAudioContext)()
const analyser = audioContext.createAnalyser()
analyser.fftSize = 256
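// frequencyBinCount is fftSize / 2, so dataArray holds 128 byte values (0-255) per frame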
const bufferLength = analyser.frequencyBinCount
const dataArray = new Uint8Array(bufferLength)

// Demo audio source: a single 440 Hz sine oscillator routed through a gain node
// and the analyser to the speakers (oscillator -> gain -> analyser -> destination)
const oscillator = audioContext.createOscillator()
const gainNode = audioContext.createGain()
oscillator.type = 'sine'
oscillator.frequency.setValueAtTime(440, audioContext.currentTime)
gainNode.gain.setValueAtTime(0.1, audioContext.currentTime)
oscillator.connect(gainNode)
gainNode.connect(analyser)
analyser.connect(audioContext.destination)
oscillator.start()

// Browsers keep a freshly created AudioContext suspended until a user gesture,
// so resume it on the first click or the analyser will only ever report zeros
document.addEventListener('click', () => {
  if (audioContext.state === 'suspended') audioContext.resume()
})

// Create visualizer bars
const bars = []
const barCount = 64
const barWidth = 0.3
const barSpacing = 0.1
const totalWidth = barCount * (barWidth + barSpacing)
const startX = -totalWidth / 2

for (let i = 0; i < barCount; i++) {
  const geometry = new THREE.BoxGeometry(barWidth, 1, barWidth)
  const material = new THREE.MeshPhongMaterial({
    color: new THREE.Color().setHSL(i / barCount, 1.0, 0.5),
    emissive: new THREE.Color().setHSL(i / barCount, 1.0, 0.5),
    emissiveIntensity: 0.3
  })
  const bar = new THREE.Mesh(geometry, material)
  bar.position.x = startX + i * (barWidth + barSpacing)
  scene.add(bar)

  bars.push(bar)
}

// Lighting
const ambientLight = new THREE.AmbientLight(0x404040, 0.3)
scene.add(ambientLight)

const spotLight = new THREE.SpotLight(0xffffff, 1)
spotLight.position.set(0, 20, 10)
scene.add(spotLight)

function animate() {
  requestAnimationFrame(animate)

  // Update audio data
  analyser.getByteFrequencyData(dataArray)

  // Update bars
  bars.forEach((bar, i) => {
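    // Map each bar to a frequency bin and normalize the 0-255 byte to 0..1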
    const dataIndex = Math.floor((i / barCount) * bufferLength)
    const amplitude = dataArray[dataIndex] / 255
    bar.scale.y = Math.max(0.1, amplitude * 8)
    bar.position.y = bar.scale.y / 2
  })

  renderer.render(scene, camera)
}
animate()
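
Color modes and controls (sketch)

The listing above covers the core pipeline but not the color modes, sensitivity, rotation speed, or start/stop from the feature list. The sketch below shows one way they could be wired up; it is not the app's exact implementation. The settings object, applyColorMode, and toggleAudio are illustrative names, and the code reuses gainNode, barCount, and the bars from the listing above.

// Hypothetical control state (illustrative names, not part of the listing above)
const settings = {
  sensitivity: 8,        // multiplier applied to the normalized amplitude
  colorMode: 'rainbow',  // 'rainbow' | 'blue' | 'red' | 'green'
  rotationSpeed: 0.005,
  playing: true
}

// Recolor a bar according to the selected color mode and its current amplitude
function applyColorMode(bar, i, amplitude) {
  const color = new THREE.Color()
  switch (settings.colorMode) {
    case 'blue':  color.setHSL(0.60, 1.0, 0.3 + amplitude * 0.4); break
    case 'red':   color.setHSL(0.00, 1.0, 0.3 + amplitude * 0.4); break
    case 'green': color.setHSL(0.33, 1.0, 0.3 + amplitude * 0.4); break
    default:      color.setHSL(i / barCount, 1.0, 0.3 + amplitude * 0.4) // rainbow
  }
  bar.material.color.copy(color)
  bar.material.emissive.copy(color)
}

// Start/stop: mute the gain instead of stopping the oscillator so playback can resume
function toggleAudio() {
  settings.playing = !settings.playing
  gainNode.gain.setValueAtTime(settings.playing ? 0.1 : 0, audioContext.currentTime)
}

// Inside the bars.forEach of animate(), sensitivity and color would be applied as:
//   bar.scale.y = Math.max(0.1, amplitude * settings.sensitivity)
//   applyColorMode(bar, i, amplitude)
// and the whole scene rotated once per frame:
//   scene.rotation.y += settings.rotationSpeed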