import React, { useState, useEffect, useRef, useCallback } from 'react';
import { Play, Square, RefreshCw, Volume2, Settings, Zap } from 'lucide-react';

/* --- AUDIO ENGINE ---
   Uses Web Audio API for synthesis. No samples required. */
const AudioEngine = {
  ctx: null,

  init() {
    if (!this.ctx) {
      this.ctx = new (window.AudioContext || window.webkitAudioContext)();
    }
    if (this.ctx.state === 'suspended') {
      this.ctx.resume();
    }
  },

  playKick(time) {
    if (!this.ctx) return;
    const osc = this.ctx.createOscillator();
    const gain = this.ctx.createGain();
    osc.connect(gain);
    gain.connect(this.ctx.destination);
    osc.frequency.setValueAtTime(150, time);
    osc.frequency.exponentialRampToValueAtTime(0.01, time + 0.5);
    gain.gain.setValueAtTime(0.8, time);
    gain.gain.exponentialRampToValueAtTime(0.01, time + 0.5);
    osc.start(time);
    osc.stop(time + 0.5);
  },

  playSnare(time) {
    if (!this.ctx) return;
    // Noise buffer for "snap"
    const bufferSize = this.ctx.sampleRate * 2; // 2 seconds
    const buffer = this.ctx.createBuffer(1, bufferSize, this.ctx.sampleRate);
    const data = buffer.getChannelData(0);
    for (let i = 0; i < bufferSize; i++) {
      data[i] = Math.random() * 2 - 1;
    }
    const noise = this.ctx.createBufferSource();
    noise.buffer = buffer;
    const noiseFilter = this.ctx.createBiquadFilter();
    noiseFilter.type = 'highpass';
    noiseFilter.frequency.value = 1000;
    const noiseGain = this.ctx.createGain();
    noise.connect(noiseFilter);
    noiseFilter.connect(noiseGain);
    noiseGain.connect(this.ctx.destination);
    // Envelope
    noiseGain.gain.setValueAtTime(0.5, time);
    noiseGain.gain.exponentialRampToValueAtTime(0.01, time + 0.2);
    noise.start(time);
    noise.stop(time + 0.2);
    // Oscillator for "body"
    const osc = this.ctx.createOscillator();
    osc.type = 'triangle';
    const oscGain = this.ctx.createGain();
    osc.connect(oscGain);
    oscGain.connect(this.ctx.destination);
    osc.frequency.setValueAtTime(250, time);
    oscGain.gain.setValueAtTime(0.3, time);
    oscGain.gain.exponentialRampToValueAtTime(0.01, time + 0.1);
    osc.start(time);
    osc.stop(time + 0.1);
  },

  playHiHat(time) {
    if (!this.ctx) return;
    // Create short noise burst
    const bufferSize = this.ctx.sampleRate * 2;
    const buffer = this.ctx.createBuffer(1, bufferSize, this.ctx.sampleRate);
    const data = buffer.getChannelData(0);
    for (let i = 0; i < bufferSize; i++) {
      data[i] = Math.random() * 2 - 1;
    }
    const noise = this.ctx.createBufferSource();
    noise.buffer = buffer;
    const bandpass = this.ctx.createBiquadFilter();
    bandpass.type = 'bandpass';
    bandpass.frequency.value = 10000;
    const highpass = this.ctx.createBiquadFilter();
    highpass.type = 'highpass';
    highpass.frequency.value = 7000;
    const gain = this.ctx.createGain();
    noise.connect(bandpass);
    bandpass.connect(highpass);
    highpass.connect(gain);
    gain.connect(this.ctx.destination);
    gain.gain.setValueAtTime(0.3, time);
    gain.gain.exponentialRampToValueAtTime(0.01, time + 0.05);
    noise.start(time);
    noise.stop(time + 0.05);
  },

  playClap(time) {
    if (!this.ctx) return;
    const noise = this.ctx.createBufferSource();
    const bufferSize = this.ctx.sampleRate * 2;
    const buffer = this.ctx.createBuffer(1, bufferSize, this.ctx.sampleRate);
    const data = buffer.getChannelData(0);
    for (let i = 0; i < bufferSize; i++) {
      data[i] = Math.random() * 2 - 1;
    }
    noise.buffer = buffer;
    const filter = this.ctx.createBiquadFilter();
    filter.type = 'bandpass';
    filter.frequency.value = 1500;
    filter.Q.value = 1;
    const gain = this.ctx.createGain();
    noise.connect(filter);
    filter.connect(gain);
    gain.connect(this.ctx.destination);
    // Clap envelope simulates multiple hands hitting slightly apart
    const duration = 0.2;
    gain.gain.setValueAtTime(0, time);
    gain.gain.linearRampToValueAtTime(0.4, time + 0.01);
    gain.gain.exponentialRampToValueAtTime(0.01, time + duration);
    noise.start(time);
    noise.stop(time + duration);
  }
};

/* --- COMPONENT --- */
export default function App() {
  const [isPlaying, setIsPlaying] = useState(false);
  const [bpm, setBpm] = useState(120);
  const [currentStep, setCurrentStep] = useState(0);

  // Grid State: 4 Instruments x 16 Steps
  // 0: Kick, 1: Snare, 2: HiHat, 3: Clap
  const [grid, setGrid] = useState([
    [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0], // Kick
    [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0], // Snare
    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], // HiHat
    [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0], // Clap
  ]);

  // Audio scheduling refs
  const nextNoteTimeRef = useRef(0);
  const currentStepRef = useRef(0);
  const timerIDRef = useRef(null);
  const lookahead = 25.0; // ms between scheduler ticks
  const scheduleAheadTime = 0.1; // seconds of audio scheduled per tick

  const instruments = [
    { name: 'Kick', color: 'bg-purple-500', play: (t) => AudioEngine.playKick(t) },
    { name: 'Snare', color: 'bg-pink-500', play: (t) => AudioEngine.playSnare(t) },
    { name: 'HiHat', color: 'bg-yellow-400', play: (t) => AudioEngine.playHiHat(t) },
    { name: 'Clap', color: 'bg-cyan-400', play: (t) => AudioEngine.playClap(t) },
  ];

  // Advance the pointer to the next 16th note
  const nextNote = () => {
    const secondsPerBeat = 60.0 / bpm;
    const secondsPer16th = secondsPerBeat / 4;
    nextNoteTimeRef.current += secondsPer16th;
    currentStepRef.current = (currentStepRef.current + 1) % 16;
  };

  const scheduleNote = (stepNumber, time) => {
    // Schedule audio for every active cell in this column
    grid.forEach((row, instrumentIndex) => {
      if (row[stepNumber]) {
        instruments[instrumentIndex].play(time);
      }
    });
    // Schedule the visual update: delay the state change so the step
    // "flash" lands roughly when the audio actually plays
    const drawTime = (time - AudioEngine.ctx.currentTime) * 1000;
    setTimeout(() => {
      setCurrentStep(stepNumber);
    }, Math.max(0, drawTime));
  };

  // Lookahead scheduler: while there are notes that need to play before the
  // next interval, schedule them on the audio clock and advance the pointer.
  // Recreating this callback when `bpm` or `grid` changes keeps the closure
  // fresh; the effect below restarts the timeout loop with the new version.
  const scheduler = useCallback(() => {
    if (!AudioEngine.ctx) return;
    while (nextNoteTimeRef.current < AudioEngine.ctx.currentTime + scheduleAheadTime) {
      scheduleNote(currentStepRef.current, nextNoteTimeRef.current);
      nextNote();
    }
    timerIDRef.current = setTimeout(scheduler, lookahead);
  }, [bpm, grid]);

  // Manage playback: start the scheduler loop when playing, tear it down otherwise
  useEffect(() => {
    if (isPlaying) {
      if (!AudioEngine.ctx) AudioEngine.init();
      AudioEngine.ctx.resume();
      // Reset the clock if we are starting fresh (or fell behind)
      if (nextNoteTimeRef.current < AudioEngine.ctx.currentTime) {
        nextNoteTimeRef.current = AudioEngine.ctx.currentTime + 0.05;
      }
      scheduler();
    } else if (timerIDRef.current) {
      clearTimeout(timerIDRef.current);
    }
    return () => clearTimeout(timerIDRef.current);
  }, [isPlaying, scheduler]);

  const toggleStep = (instrumentIndex, stepIndex) => {
    const newGrid = [...grid];
    newGrid[instrumentIndex] = [...newGrid[instrumentIndex]];
    newGrid[instrumentIndex][stepIndex] = newGrid[instrumentIndex][stepIndex] ? 0 : 1;
    setGrid(newGrid);
  };

  const handlePlay = () => {
    AudioEngine.init();
    setIsPlaying(!isPlaying);
  };

  const clearPattern = () => {
    setGrid(grid.map((row) => row.map(() => 0)));
    setIsPlaying(false);
    setCurrentStep(0);
    currentStepRef.current = 0;
  };
  /* --- UI ---
     Minimal layout (a sketch; the exact markup and styling are illustrative):
     transport controls, tempo slider, 4x16 step grid, footer. */
  return (
    <div className="min-h-screen bg-gray-900 text-white font-mono flex items-center justify-center p-6">
      <div className="w-full max-w-3xl">
        <h1 className="flex items-center gap-2 text-2xl font-bold mb-6">
          <Zap className="text-yellow-400" /> Step Sequencer
        </h1>

        {/* Transport + tempo */}
        <div className="flex items-center gap-4 mb-6">
          <button
            onClick={handlePlay}
            className="p-3 rounded-full bg-purple-600 hover:bg-purple-500 transition-colors"
            aria-label={isPlaying ? 'Stop' : 'Play'}
          >
            {isPlaying ? <Square size={20} /> : <Play size={20} />}
          </button>
          <button
            onClick={clearPattern}
            className="p-3 rounded-full bg-gray-700 hover:bg-gray-600 transition-colors"
            aria-label="Clear pattern"
          >
            <RefreshCw size={20} />
          </button>
          <div className="flex items-center gap-2 ml-4">
            <Volume2 size={18} className="text-gray-400" />
            <input
              type="range"
              min="60"
              max="180"
              value={bpm}
              onChange={(e) => setBpm(Number(e.target.value))}
            />
            <span className="w-20 text-sm text-gray-300">{bpm} BPM</span>
          </div>
        </div>

        {/* 4 x 16 step grid */}
        <div className="space-y-2">
          {grid.map((row, i) => (
            <div key={instruments[i].name} className="flex items-center gap-2">
              <span className="w-14 text-xs text-gray-400">{instruments[i].name}</span>
              <div className="flex gap-1">
                {row.map((cell, j) => (
                  <button
                    key={j}
                    onClick={() => toggleStep(i, j)}
                    className={`w-8 h-8 rounded transition-colors ${
                      cell ? instruments[i].color : 'bg-gray-700 hover:bg-gray-600'
                    } ${isPlaying && currentStep === j ? 'ring-2 ring-white' : ''}`}
                  />
                ))}
              </div>
            </div>
          ))}
        </div>

        <p className="mt-6 text-xs text-gray-500">
          Built with Web Audio API. No external samples used.
        </p>
      </div>
    </div>
  );
}
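
/*
  Usage sketch: one way the default-exported App might be mounted. The entry
  file name, root element id, and build setup are assumptions, not part of
  this file.

    // main.jsx (hypothetical entry point)
    import { createRoot } from 'react-dom/client';
    import App from './App';

    createRoot(document.getElementById('root')).render(<App />);
*/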