import { useEffect, useRef, useCallback } from 'react';
import type { AudioData } from '@/hooks/useAudioAnalyzer';
import type { OscilloscopeMode } from '@/hooks/useOscilloscopeRenderer';
import { useAudioAnalyzer as useSharedAudioAnalyzer } from '@/contexts/AudioAnalyzerContext';
import type { LiveDisplaySettings, VisualizationMode } from './OscilloscopeControls';

interface OscilloscopeDisplayProps {
  audioData: AudioData | null;
  micAnalyzer: AnalyserNode | null;
  mode: OscilloscopeMode;
  isPlaying: boolean;
  playbackSpeed: number;
  isLooping: boolean;
  audioElementRef?: React.RefObject<HTMLAudioElement>;
  onPlaybackEnd?: () => void;
  onSeek?: (position: number) => void;
  liveSettings?: LiveDisplaySettings;
}

const WIDTH = 800;
const HEIGHT = 600;
const FPS = 60;

// Get computed CSS color from theme
const getThemeColor = (cssVar: string, fallback: string): string => {
  if (typeof window === 'undefined') return fallback;
  const root = document.documentElement;
  const value = getComputedStyle(root).getPropertyValue(cssVar).trim();
  if (value) {
    return `hsl(${value})`;
  }
  return fallback;
};
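// NOTE: the hsl() wrapper above assumes theme tokens are stored as raw HSL
// components (e.g. "--primary: 142 76% 36%", the shadcn/Tailwind convention);
// tokens stored as complete color strings would not need wrapping.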
export function OscilloscopeDisplay({
  audioData,
  micAnalyzer,
  mode,
  isPlaying,
  playbackSpeed,
  isLooping,
  audioElementRef,
  onPlaybackEnd,
  onSeek,
  liveSettings
}: OscilloscopeDisplayProps) {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const animationRef = useRef<number | null>(null);
  const { analyzerNode: sharedAnalyzer } = useSharedAudioAnalyzer();

  // Use shared analyzer for live audio (music player, sound effects)
  const liveAnalyzer = sharedAnalyzer || micAnalyzer;

  // Get settings with defaults
  const lineThickness = liveSettings?.lineThickness ?? 2;
  const showGrid = liveSettings?.showGrid ?? true;
  const glowIntensity = liveSettings?.glowIntensity ?? 1;
  const liveDisplayMode = liveSettings?.displayMode ?? 'combined';
  const visualizationMode: VisualizationMode = liveSettings?.visualizationMode ?? 'waveform';

  const drawGraticule = useCallback((ctx: CanvasRenderingContext2D) => {
    if (!showGrid) return;
    const primaryColor = getThemeColor('--primary', '#00ff00');
    ctx.strokeStyle = primaryColor;
    ctx.globalAlpha = 0.3;
    ctx.lineWidth = 1;
    // Horizontal center line (X axis)
    ctx.beginPath();
    ctx.moveTo(0, HEIGHT / 2);
    ctx.lineTo(WIDTH, HEIGHT / 2);
    ctx.stroke();
    // Vertical center line (Y axis)
    ctx.beginPath();
    ctx.moveTo(WIDTH / 2, 0);
    ctx.lineTo(WIDTH / 2, HEIGHT);
    ctx.stroke();
    ctx.globalAlpha = 1;
  }, [showGrid]);

  // Draw spectrum bars
  const drawSpectrum = useCallback((
    ctx: CanvasRenderingContext2D,
    frequencyData: Uint8Array,
    yOffset: number = 0,
    heightRatio: number = 1
  ) => {
    const primaryColor = getThemeColor('--primary', '#00ff00');
    const accentColor = getThemeColor('--accent', '#00ccff');
    const barCount = 64;
    const barWidth = (WIDTH / barCount) - 2;
    const maxBarHeight = (HEIGHT * heightRatio) * 0.8;
    // Sample frequency data for the bar count
    const step = Math.floor(frequencyData.length / barCount);
    for (let i = 0; i < barCount; i++) {
      const dataIndex = i * step;
      const value = frequencyData[dataIndex] / 255;
      const barHeight = value * maxBarHeight;
      const x = i * (barWidth + 2);
      const y = yOffset + (HEIGHT * heightRatio) - barHeight;
      // Create gradient for each bar
      const gradient = ctx.createLinearGradient(x, y + barHeight, x, y);
      gradient.addColorStop(0, primaryColor);
      gradient.addColorStop(1, accentColor);
      ctx.fillStyle = gradient;
      ctx.fillRect(x, y, barWidth, barHeight);
    }
  }, []);

  const drawFrame = useCallback(() => {
    if (!canvasRef.current) return;
    // Always allow drawing if we have live analyzer, even without audioData
    const hasLiveSource = liveAnalyzer || micAnalyzer;
    if (!audioData && !hasLiveSource) return;
    const canvas = canvasRef.current;
    const ctx = canvas.getContext('2d');
    if (!ctx) return;

    const primaryColor = getThemeColor('--primary', '#00ff00');
    const backgroundColor = getThemeColor('--background', '#000000');

    let samplesPerFrame: number = 0;
    let endSample: number = 0;
    let samplesToAdvance: number = 0;

    // Priority: micAnalyzer > liveAnalyzer (shared) > audioData (file)
    const activeAnalyzer = micAnalyzer || liveAnalyzer;
    if (activeAnalyzer && !audioData) {
      // Real-time audio data (mic or music player)
      const bufferLength = activeAnalyzer.frequencyBinCount;
      const timeDomainData = new Uint8Array(bufferLength);
      const frequencyData = new Uint8Array(bufferLength);
      activeAnalyzer.getByteTimeDomainData(timeDomainData);
      activeAnalyzer.getByteFrequencyData(frequencyData);

      // Clear to background color
      ctx.fillStyle = backgroundColor;
      ctx.fillRect(0, 0, WIDTH, HEIGHT);

      // Draw graticule first (only for waveform modes)
      if (visualizationMode !== 'spectrum') {
        drawGraticule(ctx);
      }

      // Convert to Float32Array-like for consistency
      const liveData = new Float32Array(timeDomainData.length);
      for (let i = 0; i < timeDomainData.length; i++) {
        liveData[i] = (timeDomainData[i] - 128) / 128; // Normalize to -1 to 1
      }

      // Apply glow effect
      if (glowIntensity > 0) {
        ctx.shadowColor = primaryColor;
        ctx.shadowBlur = glowIntensity * 8;
      } else {
        ctx.shadowBlur = 0;
      }
      ctx.strokeStyle = primaryColor;
      ctx.lineWidth = lineThickness;

      // Draw based on visualization mode
      if (visualizationMode === 'spectrum') {
        // Spectrum bars only
        ctx.shadowBlur = 0;
        drawSpectrum(ctx, frequencyData, 0, 1);
      } else if (visualizationMode === 'both') {
        // Waveform on top half, spectrum on bottom half
        // Draw waveform
        if (liveDisplayMode === 'all') {
          // XY mode in top half
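          // Consecutive samples are paired as (x, y) points. An AnalyserNode
          // down-mixes its input to mono, so this traces an approximation of a
          // stereo Lissajous figure rather than a true L-vs-R plot.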
          ctx.beginPath();
          const centerX = WIDTH / 2;
          const centerY = HEIGHT / 4;
          const scale = Math.min(WIDTH, HEIGHT / 2) * 0.35;
          for (let i = 0; i < liveData.length - 1; i += 2) {
            const x = centerX + liveData[i] * scale;
            const y = centerY - liveData[i + 1] * scale;
            if (i === 0) ctx.moveTo(x, y);
            else ctx.lineTo(x, y);
          }
          ctx.stroke();
        } else {
          // Combined waveform in top half
          ctx.beginPath();
          const sliceWidth = WIDTH / liveData.length;
          let x = 0;
          for (let i = 0; i < liveData.length; i++) {
            const v = liveData[i];
            const y = (v * HEIGHT * 0.4) / 2 + HEIGHT / 4;
            if (i === 0) ctx.moveTo(x, y);
            else ctx.lineTo(x, y);
            x += sliceWidth;
          }
          ctx.stroke();
        }
        // Spectrum in bottom half
        ctx.shadowBlur = 0;
        drawSpectrum(ctx, frequencyData, HEIGHT / 2, 0.5);
        // Divider line
        ctx.strokeStyle = 'rgba(255,255,255,0.1)';
        ctx.beginPath();
        ctx.moveTo(0, HEIGHT / 2);
        ctx.lineTo(WIDTH, HEIGHT / 2);
        ctx.stroke();
      } else {
        // Waveform only (default)
        if (liveDisplayMode === 'all') {
          // XY / Lissajous mode - treat odd/even samples as L/R
          ctx.beginPath();
          const centerX = WIDTH / 2;
          const centerY = HEIGHT / 2;
          const scale = Math.min(WIDTH, HEIGHT) * 0.4;
          for (let i = 0; i < liveData.length - 1; i += 2) {
            const x = centerX + liveData[i] * scale;
            const y = centerY - liveData[i + 1] * scale;
            if (i === 0) ctx.moveTo(x, y);
            else ctx.lineTo(x, y);
          }
          ctx.stroke();
        } else {
          // Combined waveform mode
          ctx.beginPath();
          const sliceWidth = WIDTH / liveData.length;
          let x = 0;
          for (let i = 0; i < liveData.length; i++) {
            const v = liveData[i];
            const y = (v * HEIGHT) / 2 + HEIGHT / 2;
            if (i === 0) ctx.moveTo(x, y);
            else ctx.lineTo(x, y);
            x += sliceWidth;
          }
          ctx.stroke();
        }
      }
      ctx.shadowBlur = 0;
      // Request next frame for real-time
      animationRef.current = requestAnimationFrame(drawFrame);
      return;
    }

    // File playback mode - need audioData
    if (!audioData) return;

    // File playback mode - sync with audio element if available
    const baseSamplesPerFrame = Math.floor(audioData.sampleRate / FPS);
    samplesPerFrame = Math.floor(baseSamplesPerFrame * playbackSpeed);
    samplesToAdvance = samplesPerFrame;
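    // For scale: at a 44.1 kHz sample rate and FPS = 60, baseSamplesPerFrame is
    // floor(44100 / 60) = 735, so each frame windows roughly 16.7 ms of audio,
    // stretched or compressed by playbackSpeed.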
    // Get current position from audio element (real-time sync at 60fps)
    let startSample: number;
    if (audioElementRef?.current && !audioElementRef.current.paused) {
      const currentTime = audioElementRef.current.currentTime;
      startSample = Math.floor((currentTime / audioData.duration) * audioData.leftChannel.length);
    } else {
      // Fallback: just show first frame when paused
      startSample = 0;
    }
    endSample = Math.min(startSample + samplesPerFrame, audioData.leftChannel.length);

    // Clear to background color
    ctx.fillStyle = backgroundColor;
    ctx.fillRect(0, 0, WIDTH, HEIGHT);

    // Draw graticule first
    drawGraticule(ctx);

    // Apply glow effect
    if (glowIntensity > 0) {
      ctx.shadowColor = primaryColor;
      ctx.shadowBlur = glowIntensity * 8;
    } else {
      ctx.shadowBlur = 0;
    }
    ctx.lineWidth = lineThickness;
    ctx.lineCap = 'round';

    const leftColor = primaryColor;
    const rightColor = getThemeColor('--accent', '#00ccff');
    const xyColor = getThemeColor('--secondary', '#ff8800');
    const dividerColor = 'rgba(255,255,255,0.1)';

    if (mode === 'combined') {
      // Combined: both channels merged
      ctx.strokeStyle = leftColor;
      ctx.beginPath();
      const samplesPerPixel = samplesPerFrame / WIDTH;
      const centerY = HEIGHT / 2;
      for (let x = 0; x < WIDTH; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = (audioData.leftChannel[sampleIndex] + audioData.rightChannel[sampleIndex]) / 2;
        const y = centerY - sample * (HEIGHT * 0.4);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();
    } else if (mode === 'separate') {
      // Separate: Left on top, Right on bottom
      const halfHeight = HEIGHT / 2;
      const samplesPerPixel = samplesPerFrame / WIDTH;
      // Left channel (top)
      ctx.strokeStyle = leftColor;
      ctx.beginPath();
      const leftCenterY = halfHeight / 2;
      for (let x = 0; x < WIDTH; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (halfHeight * 0.35);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();
      // Right channel (bottom)
      ctx.strokeStyle = rightColor;
      ctx.beginPath();
      const rightCenterY = halfHeight + halfHeight / 2;
      for (let x = 0; x < WIDTH; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (halfHeight * 0.35);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();
      // Divider
      ctx.strokeStyle = dividerColor;
      ctx.beginPath();
      ctx.moveTo(0, halfHeight);
      ctx.lineTo(WIDTH, halfHeight);
      ctx.stroke();
    } else if (mode === 'all') {
      // All: L/R on top row, XY on bottom
      const topHeight = HEIGHT / 2;
      const bottomHeight = HEIGHT / 2;
      const halfWidth = WIDTH / 2;
      const samplesPerPixel = samplesPerFrame / halfWidth;
      // Left channel (top-left)
      ctx.strokeStyle = leftColor;
      ctx.beginPath();
      const leftCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.leftChannel.length) break;
        const sample = audioData.leftChannel[sampleIndex];
        const y = leftCenterY - sample * (topHeight * 0.35);
        if (x === 0) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();
      // Right channel (top-right)
      ctx.strokeStyle = rightColor;
      ctx.beginPath();
      const rightCenterY = topHeight / 2;
      for (let x = 0; x < halfWidth; x++) {
        const sampleIndex = Math.floor(startSample + x * samplesPerPixel);
        if (sampleIndex >= audioData.rightChannel.length) break;
        const sample = audioData.rightChannel[sampleIndex];
        const y = rightCenterY - sample * (topHeight * 0.35);
        if (x === 0) ctx.moveTo(halfWidth + x, y);
        else ctx.lineTo(halfWidth + x, y);
      }
      ctx.stroke();
      // XY mode (bottom half)
      ctx.strokeStyle = xyColor;
      ctx.beginPath();
      const xyCenterX = WIDTH / 2;
      const xyCenterY = topHeight + bottomHeight / 2;
      const xyScale = Math.min(halfWidth, bottomHeight) * 0.35;
      for (let i = startSample; i < endSample; i++) {
        const x = xyCenterX + audioData.leftChannel[i] * xyScale;
        const y = xyCenterY - audioData.rightChannel[i] * xyScale;
        if (i === startSample) ctx.moveTo(x, y);
        else ctx.lineTo(x, y);
      }
      ctx.stroke();
      // Dividers
      ctx.strokeStyle = dividerColor;
      ctx.beginPath();
      ctx.moveTo(0, topHeight);
      ctx.lineTo(WIDTH, topHeight);
      ctx.stroke();
      ctx.beginPath();
      ctx.moveTo(halfWidth, 0);
      ctx.lineTo(halfWidth, topHeight);
      ctx.stroke();
    }
    ctx.shadowBlur = 0;

    // Check if audio ended (when syncing to audio element)
    if (audioElementRef?.current) {
      if (audioElementRef.current.ended && !isLooping) {
        onPlaybackEnd?.();
        return;
      }
    }
    animationRef.current = requestAnimationFrame(drawFrame);
  }, [audioData, micAnalyzer, liveAnalyzer, mode, drawGraticule, drawSpectrum, onPlaybackEnd, isPlaying, playbackSpeed, isLooping, lineThickness, glowIntensity, liveDisplayMode, visualizationMode, audioElementRef]);
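  // drawFrame schedules itself via requestAnimationFrame; the playback effect
  // below starts the first frame and cancels any pending frame on cleanup, so
  // re-renders that rebuild drawFrame also restart the loop cleanly.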
  // Initialize canvas
  useEffect(() => {
    if (!canvasRef.current) return;
    const ctx = canvasRef.current.getContext('2d');
    if (ctx) {
      ctx.fillStyle = '#000000';
      ctx.fillRect(0, 0, WIDTH, HEIGHT);
      drawGraticule(ctx);
    }
  }, [drawGraticule]);

  // Handle playback - start animation for file playback or live audio
  useEffect(() => {
    const hasLiveSource = liveAnalyzer || micAnalyzer;
    if (isPlaying && audioData) {
      // File playback
      animationRef.current = requestAnimationFrame(drawFrame);
    } else if (hasLiveSource && !audioData) {
      // Live audio visualization (music player, sound effects)
      animationRef.current = requestAnimationFrame(drawFrame);
    } else {
      if (animationRef.current) {
        cancelAnimationFrame(animationRef.current);
      }
    }
    return () => {
      if (animationRef.current) {
        cancelAnimationFrame(animationRef.current);
      }
    };
  }, [isPlaying, audioData, liveAnalyzer, micAnalyzer, drawFrame]);

  const getModeLabel = () => {
    switch (mode) {
      case 'combined': return 'L+R';
      case 'separate': return 'L / R';
      case 'all': return 'ALL';
      default: return '';
    }
  };

  return (
    <div className="relative">
      {/* NOTE: utility class names in this markup are illustrative placeholders */}
      <canvas
        ref={canvasRef}
        width={WIDTH}
        height={HEIGHT}
        className="w-full cursor-pointer"
        onClick={(e) => {
          // Click-to-seek: map the click X position to a 0..1 playback position
          if (!audioData) return;
          const rect = canvasRef.current?.getBoundingClientRect();
          if (!rect) return;
          const x = e.clientX - rect.left;
          const clickPosition = x / rect.width;
          onSeek?.(Math.max(0, Math.min(1, clickPosition)));
        }}
      />
      {/* Mode indicator */}
      <div className="absolute top-2 right-2 text-xs text-primary">
        {getModeLabel()}
      </div>
      {/* Idle state - only show if no live audio and no file */}
      {!audioData && !liveAnalyzer && !micAnalyzer && (
        <div className="absolute inset-0 flex items-center justify-center">
          <span className="text-primary/50">NO SIGNAL</span>
        </div>
      )}
    </div>
  );
}
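
// --- Usage sketch (illustrative, not part of the component) ---
// A minimal parent wiring, assuming file playback is driven by an <audio>
// element whose ref is shared with the display; `audioData` is whatever the
// app's decode step produced. The demo name, audio path, and the simplified
// isPlaying={!!audioData} are hypothetical (a real parent would track
// play/pause state).
export function OscilloscopeDemo({ audioData }: { audioData: AudioData | null }) {
  const audioRef = useRef<HTMLAudioElement>(null);
  return (
    <div>
      <audio ref={audioRef} src="/audio/demo-track.mp3" controls />
      <OscilloscopeDisplay
        audioData={audioData}
        micAnalyzer={null}
        mode="combined"
        isPlaying={!!audioData}
        playbackSpeed={1}
        isLooping={false}
        audioElementRef={audioRef}
        onSeek={(pos) => {
          // Jump the element; the next drawn frame re-syncs from currentTime
          if (audioRef.current && audioData) {
            audioRef.current.currentTime = pos * audioData.duration;
          }
        }}
      />
    </div>
  );
}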