From 6e3d0e8918cab64d4e6cdea6fa04701d23751bf2 Mon Sep 17 00:00:00 2001 From: "gpt-engineer-app[bot]" <159125892+gpt-engineer-app[bot]@users.noreply.github.com> Date: Sun, 21 Dec 2025 14:30:47 +0000 Subject: [PATCH] Add XY/Lissajous live display mode and initialize AudioContext on mount --- src/components/ControlPanel.tsx | 14 ++----- src/components/OscilloscopeDisplay.tsx | 54 +++++++++++++++++--------- src/contexts/AudioAnalyzerContext.tsx | 9 ++++- 3 files changed, 48 insertions(+), 29 deletions(-) diff --git a/src/components/ControlPanel.tsx b/src/components/ControlPanel.tsx index ddecc4c..d5874bd 100755 --- a/src/components/ControlPanel.tsx +++ b/src/components/ControlPanel.tsx @@ -82,19 +82,13 @@ export function ControlPanel({
- - -
-
- -
diff --git a/src/components/OscilloscopeDisplay.tsx b/src/components/OscilloscopeDisplay.tsx index 97e3506..0a39a02 100755 --- a/src/components/OscilloscopeDisplay.tsx +++ b/src/components/OscilloscopeDisplay.tsx @@ -57,6 +57,7 @@ export function OscilloscopeDisplay({ const lineThickness = liveSettings?.lineThickness ?? 2; const showGrid = liveSettings?.showGrid ?? true; const glowIntensity = liveSettings?.glowIntensity ?? 1; + const liveDisplayMode = liveSettings?.displayMode ?? 'combined'; const drawGraticule = useCallback((ctx: CanvasRenderingContext2D) => { if (!showGrid) return; @@ -122,10 +123,6 @@ export function OscilloscopeDisplay({ liveData[i] = (dataArray[i] - 128) / 128; // Normalize to -1 to 1 } - samplesPerFrame = liveData.length; - startSample = 0; - endSample = liveData.length; - // Apply glow effect if (glowIntensity > 0) { ctx.shadowColor = primaryColor; @@ -134,28 +131,49 @@ export function OscilloscopeDisplay({ ctx.shadowBlur = 0; } - // Draw live data directly ctx.strokeStyle = primaryColor; ctx.lineWidth = lineThickness; - ctx.beginPath(); - const sliceWidth = WIDTH / samplesPerFrame; - let x = 0; + // Draw based on live display mode + if (liveDisplayMode === 'all') { + // XY / Lissajous mode - treat odd/even samples as L/R + ctx.beginPath(); + const centerX = WIDTH / 2; + const centerY = HEIGHT / 2; + const scale = Math.min(WIDTH, HEIGHT) * 0.4; - for (let i = 0; i < samplesPerFrame; i++) { - const v = liveData[i]; - const y = (v * HEIGHT) / 2 + HEIGHT / 2; + for (let i = 0; i < liveData.length - 1; i += 2) { + const x = centerX + liveData[i] * scale; + const y = centerY - liveData[i + 1] * scale; - if (i === 0) { - ctx.moveTo(x, y); - } else { - ctx.lineTo(x, y); + if (i === 0) { + ctx.moveTo(x, y); + } else { + ctx.lineTo(x, y); + } } + ctx.stroke(); + } else { + // Combined waveform mode (default) + ctx.beginPath(); + const sliceWidth = WIDTH / liveData.length; + let x = 0; - x += sliceWidth; + for (let i = 0; i < liveData.length; i++) { + 
const v = liveData[i]; + const y = (v * HEIGHT) / 2 + HEIGHT / 2; + + if (i === 0) { + ctx.moveTo(x, y); + } else { + ctx.lineTo(x, y); + } + + x += sliceWidth; + } + ctx.stroke(); } - ctx.stroke(); ctx.shadowBlur = 0; // Request next frame for real-time @@ -337,7 +355,7 @@ export function OscilloscopeDisplay({ } animationRef.current = requestAnimationFrame(drawFrame); - }, [audioData, micAnalyzer, liveAnalyzer, mode, drawGraticule, onPlaybackEnd, isPlaying, playbackSpeed, isLooping, seekPosition, lineThickness, glowIntensity]); + }, [audioData, micAnalyzer, liveAnalyzer, mode, drawGraticule, onPlaybackEnd, isPlaying, playbackSpeed, isLooping, seekPosition, lineThickness, glowIntensity, liveDisplayMode]); // Initialize canvas useEffect(() => { diff --git a/src/contexts/AudioAnalyzerContext.tsx b/src/contexts/AudioAnalyzerContext.tsx index 37f8e2f..879adb9 100644 --- a/src/contexts/AudioAnalyzerContext.tsx +++ b/src/contexts/AudioAnalyzerContext.tsx @@ -16,8 +16,9 @@ export const AudioAnalyzerProvider = ({ children }: { children: ReactNode }) => const analyzerRef = useRef(null); const sourceMapRef = useRef>(new Map()); const [isReady, setIsReady] = useState(false); + const [, forceUpdate] = useState(0); - // Initialize audio context lazily on first user interaction + // Initialize audio context - call immediately but handle suspended state const initAudioContext = useCallback(() => { if (audioContextRef.current) return audioContextRef.current; @@ -33,6 +34,7 @@ export const AudioAnalyzerProvider = ({ children }: { children: ReactNode }) => analyzerRef.current = analyzer; setIsReady(true); + forceUpdate(n => n + 1); // Force re-render to update context value return ctx; } catch (e) { console.error('Failed to create AudioContext:', e); @@ -40,6 +42,11 @@ export const AudioAnalyzerProvider = ({ children }: { children: ReactNode }) => } }, []); + // Initialize immediately on mount + useEffect(() => { + initAudioContext(); + }, [initAudioContext]); + // Connect an audio 
element to the analyzer const connectAudioElement = useCallback((element: HTMLAudioElement) => { const ctx = initAudioContext();